commit 5e0a8fedd08ca58c591a06af56340056c053c8cd Author: 20918 <2091823062@qq.com> Date: Tue May 20 11:29:13 2025 +0800 Initial commit diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..02d796f --- /dev/null +++ b/.dockerignore @@ -0,0 +1,11 @@ +.vscode +ollama +app +macapp +dist +build +.env +.cache +test_data +.git + diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..b127984 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,24 @@ +llama/**/*.cpp linguist-vendored +llama/**/*.hpp linguist-vendored +llama/**/*.h linguist-vendored +llama/**/*.c linguist-vendored +llama/**/*.cu linguist-vendored +llama/**/*.cuh linguist-vendored +llama/**/*.m linguist-vendored +llama/**/*.metal linguist-vendored + +ml/backend/**/*.c linguist-vendored +ml/backend/**/*.h linguist-vendored +ml/backend/**/*.cpp linguist-vendored +ml/backend/**/*.hpp linguist-vendored +ml/backend/**/*.cu linguist-vendored +ml/backend/**/*.cuh linguist-vendored +ml/backend/**/*.m linguist-vendored +ml/backend/**/*.metal linguist-vendored +ml/backend/**/CMakeLists.txt linguist-vendored + +llama/build-info.cpp linguist-generated +ml/backend/ggml/ggml/src/ggml-metal/ggml-metal-embed.s linguist-generated + +* text=auto +*.go text eol=lf diff --git a/.github/ISSUE_TEMPLATE/10_bug_report.yml b/.github/ISSUE_TEMPLATE/10_bug_report.yml new file mode 100644 index 0000000..4ac6fee --- /dev/null +++ b/.github/ISSUE_TEMPLATE/10_bug_report.yml @@ -0,0 +1,68 @@ +name: Bug report +labels: [bug] +description: Something isn't working right. +body: + - type: textarea + id: description + attributes: + label: What is the issue? + description: What happened? What did you expect to happen? + validations: + required: true + - type: textarea + id: logs + attributes: + label: Relevant log output + description: Please copy and paste any relevant log output. See [Troubleshooting Guide](https://github.com/ollama/ollama/blob/main/docs/troubleshooting.md#how-to-troubleshoot-issues) for details. + render: shell + validations: + required: false + - type: dropdown + id: os + attributes: + label: OS + description: Which operating system are you using? + multiple: true + options: + - Linux + - macOS + - Windows + - Docker + - WSL2 + validations: + required: false + - type: dropdown + id: gpu + attributes: + label: GPU + description: Which GPU are you using? + multiple: true + options: + - Nvidia + - AMD + - Intel + - Apple + - Other + validations: + required: false + - type: dropdown + id: cpu + attributes: + label: CPU + description: Which CPU are you using? + multiple: true + options: + - Intel + - AMD + - Apple + - Other + validations: + required: false + - type: input + id: version + attributes: + label: Ollama version + description: What version of Ollama are you using? 
(`ollama --version`) + placeholder: e.g., 0.1.32 + validations: + required: false diff --git a/.github/ISSUE_TEMPLATE/20_feature_request.md b/.github/ISSUE_TEMPLATE/20_feature_request.md new file mode 100644 index 0000000..e899721 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/20_feature_request.md @@ -0,0 +1,6 @@ +--- +name: Feature request +about: Request a new feature +labels: feature request +--- + diff --git a/.github/ISSUE_TEMPLATE/30_model_request.md b/.github/ISSUE_TEMPLATE/30_model_request.md new file mode 100644 index 0000000..c705a5f --- /dev/null +++ b/.github/ISSUE_TEMPLATE/30_model_request.md @@ -0,0 +1,5 @@ +--- +name: Model request +about: Request support for a new model to be added to Ollama +labels: model request +--- \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000..70d9aa3 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,8 @@ +blank_issues_enabled: true +contact_links: + - name: Help + url: https://discord.com/invite/ollama + about: Please join our Discord server for help using Ollama + - name: Troubleshooting + url: https://github.com/ollama/ollama/blob/main/docs/faq.md#faq + about: See the FAQ for common issues and solutions diff --git a/.github/workflows/latest.yaml b/.github/workflows/latest.yaml new file mode 100644 index 0000000..4d47dd3 --- /dev/null +++ b/.github/workflows/latest.yaml @@ -0,0 +1,24 @@ +name: latest + +on: + release: + types: [released] + +jobs: + update-latest: + environment: release + runs-on: linux + steps: + - uses: actions/checkout@v4 + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ vars.DOCKER_USER }} + password: ${{ secrets.DOCKER_ACCESS_TOKEN }} + - name: Tag images as latest + env: + PUSH: "1" + shell: bash + run: | + export "VERSION=${GITHUB_REF_NAME#v}" + ./scripts/tag_latest.sh diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml new file mode 100644 index 0000000..f423106 --- /dev/null +++ b/.github/workflows/release.yaml @@ -0,0 +1,495 @@ +name: release + +on: + push: + tags: + - 'v*' + +env: + CGO_CFLAGS: '-O3' + CGO_CXXFLAGS: '-O3' + +jobs: + setup-environment: + runs-on: ubuntu-latest + environment: release + outputs: + GOFLAGS: ${{ steps.goflags.outputs.GOFLAGS }} + steps: + - uses: actions/checkout@v4 + - name: Set environment + id: goflags + run: | + echo GOFLAGS="'-ldflags=-w -s \"-X=github.com/ollama/ollama/version.Version=${GITHUB_REF_NAME#v}\" \"-X=github.com/ollama/ollama/server.mode=release\"'" >>$GITHUB_OUTPUT + + darwin-build: + runs-on: macos-13 + environment: release + needs: setup-environment + strategy: + matrix: + os: [darwin] + arch: [amd64, arm64] + env: + GOFLAGS: ${{ needs.setup-environment.outputs.GOFLAGS }} + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-go@v5 + with: + go-version-file: go.mod + - run: | + go build -o dist/ . 
+ env: + GOOS: ${{ matrix.os }} + GOARCH: ${{ matrix.arch }} + CGO_ENABLED: 1 + CGO_CPPFLAGS: '-mmacosx-version-min=11.3' + - if: matrix.arch == 'amd64' + run: | + cmake --preset CPU -DCMAKE_OSX_DEPLOYMENT_TARGET=11.3 -DCMAKE_SYSTEM_PROCESSOR=x86_64 -DCMAKE_OSX_ARCHITECTURES=x86_64 + cmake --build --parallel --preset CPU + cmake --install build --component CPU --strip --parallel 8 + - uses: actions/upload-artifact@v4 + with: + name: build-${{ matrix.os }}-${{ matrix.arch }} + path: dist/* + + darwin-sign: + runs-on: macos-13 + environment: release + needs: darwin-build + steps: + - uses: actions/checkout@v4 + - run: | + echo $MACOS_SIGNING_KEY | base64 --decode > certificate.p12 + security create-keychain -p password build.keychain + security default-keychain -s build.keychain + security unlock-keychain -p password build.keychain + security import certificate.p12 -k build.keychain -P $MACOS_SIGNING_KEY_PASSWORD -T /usr/bin/codesign + security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k password build.keychain + security set-keychain-settings -lut 3600 build.keychain + env: + MACOS_SIGNING_KEY: ${{ secrets.MACOS_SIGNING_KEY }} + MACOS_SIGNING_KEY_PASSWORD: ${{ secrets.MACOS_SIGNING_KEY_PASSWORD }} + - uses: actions/download-artifact@v4 + with: + name: build-darwin-amd64 + path: dist/darwin-amd64 + - uses: actions/download-artifact@v4 + with: + name: build-darwin-arm64 + path: dist/darwin-arm64 + - run: | + export VERSION=${GITHUB_REF_NAME#v} + ./scripts/build_darwin.sh sign macapp + env: + APPLE_IDENTITY: ${{ secrets.APPLE_IDENTITY }} + APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }} + APPLE_TEAM_ID: ${{ vars.APPLE_TEAM_ID }} + APPLE_ID: ${{ vars.APPLE_ID }} + SDKROOT: /Applications/Xcode_14.1.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk + DEVELOPER_DIR: /Applications/Xcode_14.1.0.app/Contents/Developer + - uses: actions/upload-artifact@v4 + with: + name: dist-darwin + path: | + dist/Ollama-darwin.zip + dist/ollama-darwin.tgz + + windows-depends: + strategy: + matrix: + os: [windows] + arch: [amd64] + preset: ['CPU'] + include: + - os: windows + arch: amd64 + preset: 'CUDA 11' + install: https://developer.download.nvidia.com/compute/cuda/11.3.1/local_installers/cuda_11.3.1_465.89_win10.exe + cuda-version: '11.3' + - os: windows + arch: amd64 + preset: 'CUDA 12' + install: https://developer.download.nvidia.com/compute/cuda/12.8.0/local_installers/cuda_12.8.0_571.96_windows.exe + cuda-version: '12.8' + - os: windows + arch: amd64 + preset: 'ROCm 6' + install: https://download.amd.com/developer/eula/rocm-hub/AMD-Software-PRO-Edition-24.Q4-WinSvr2022-For-HIP.exe + rocm-version: '6.2' + runs-on: ${{ matrix.arch == 'arm64' && format('{0}-{1}', matrix.os, matrix.arch) || matrix.os }} + environment: release + env: + GOFLAGS: ${{ needs.setup-environment.outputs.GOFLAGS }} + steps: + - name: Install system dependencies + run: | + choco install -y --no-progress ccache ninja + ccache -o cache_dir=${{ github.workspace }}\.ccache + - if: startsWith(matrix.preset, 'CUDA ') || startsWith(matrix.preset, 'ROCm ') + id: cache-install + uses: actions/cache/restore@v4 + with: + path: | + C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA + C:\Program Files\AMD\ROCm + key: ${{ matrix.install }} + - if: startsWith(matrix.preset, 'CUDA ') + name: Install CUDA ${{ matrix.cuda-version }} + run: | + $ErrorActionPreference = "Stop" + if ("${{ steps.cache-install.outputs.cache-hit }}" -ne 'true') { + Invoke-WebRequest -Uri "${{ matrix.install }}" -OutFile "install.exe" + 
$subpackages = @("cudart", "nvcc", "cublas", "cublas_dev") | Foreach-Object {"${_}_${{ matrix.cuda-version }}"} + Start-Process -FilePath .\install.exe -ArgumentList (@("-s") + $subpackages) -NoNewWindow -Wait + } + + $cudaPath = (Resolve-Path "C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\*").path + echo "$cudaPath\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append + - if: startsWith(matrix.preset, 'ROCm') + name: Install ROCm ${{ matrix.rocm-version }} + run: | + $ErrorActionPreference = "Stop" + if ("${{ steps.cache-install.outputs.cache-hit }}" -ne 'true') { + Invoke-WebRequest -Uri "${{ matrix.install }}" -OutFile "install.exe" + Start-Process -FilePath .\install.exe -ArgumentList '-install' -NoNewWindow -Wait + } + + $hipPath = (Resolve-Path "C:\Program Files\AMD\ROCm\*").path + echo "$hipPath\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append + echo "CC=$hipPath\bin\clang.exe" | Out-File -FilePath $env:GITHUB_ENV -Append + echo "CXX=$hipPath\bin\clang++.exe" | Out-File -FilePath $env:GITHUB_ENV -Append + - if: matrix.preset == 'CPU' + run: | + echo "CC=clang.exe" | Out-File -FilePath $env:GITHUB_ENV -Append + echo "CXX=clang++.exe" | Out-File -FilePath $env:GITHUB_ENV -Append + - if: ${{ !cancelled() && steps.cache-install.outputs.cache-hit != 'true' }} + uses: actions/cache/save@v4 + with: + path: | + C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA + C:\Program Files\AMD\ROCm + key: ${{ matrix.install }} + - uses: actions/checkout@v4 + - uses: actions/cache@v4 + with: + path: ${{ github.workspace }}\.ccache + key: ccache-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.preset }} + - name: Build target "${{ matrix.preset }}" + run: | + Import-Module 'C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\Common7\Tools\Microsoft.VisualStudio.DevShell.dll' + Enter-VsDevShell -VsInstallPath 'C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise' -SkipAutomaticLocation -DevCmdArguments '-arch=x64 -no_logo' + cmake --preset "${{ matrix.preset }}" + cmake --build --parallel --preset "${{ matrix.preset }}" + cmake --install build --component "${{ startsWith(matrix.preset, 'CUDA ') && 'CUDA' || startsWith(matrix.preset, 'ROCm ') && 'HIP' || 'CPU' }}" --strip --parallel 8 + env: + CMAKE_GENERATOR: Ninja + - uses: actions/upload-artifact@v4 + with: + name: depends-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.preset }} + path: dist\* + + windows-build: + strategy: + matrix: + os: [windows] + arch: [amd64, arm64] + runs-on: ${{ matrix.arch == 'arm64' && format('{0}-{1}', matrix.os, matrix.arch) || matrix.os }} + environment: release + needs: [setup-environment] + env: + GOFLAGS: ${{ needs.setup-environment.outputs.GOFLAGS }} + steps: + - name: Install AMD64 system dependencies + if: matrix.arch == 'amd64' + run: | + $ErrorActionPreference = "Stop" + Start-Process "C:\msys64\usr\bin\pacman.exe" -ArgumentList @("-S", "--noconfirm", "mingw-w64-clang-x86_64-gcc-compat", "mingw-w64-clang-x86_64-clang") -NoNewWindow -Wait + echo "C:\msys64\usr\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append + echo "C:\msys64\clang64\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append + - name: Install ARM64 system dependencies + if: matrix.arch == 'arm64' + run: | + $ErrorActionPreference = "Stop" + Set-ExecutionPolicy Bypass -Scope Process -Force + [System.Net.ServicePointManager]::SecurityProtocol = [System.Net.ServicePointManager]::SecurityProtocol -bor 3072 + iex ((New-Object 
System.Net.WebClient).DownloadString('https://community.chocolatey.org/install.ps1')) + echo "C:\ProgramData\chocolatey\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append + + choco install -y --no-progress git gzip + echo "C:\Program Files\Git\cmd" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append + + Invoke-WebRequest -Uri "https://github.com/mstorsjo/llvm-mingw/releases/download/20240619/llvm-mingw-20240619-ucrt-aarch64.zip" -OutFile "${{ runner.temp }}\llvm-mingw-ucrt-aarch64.zip" + Expand-Archive -Path ${{ runner.temp }}\llvm-mingw-ucrt-aarch64.zip -DestinationPath "C:\Program Files\" + $installPath=(Resolve-Path -Path "C:\Program Files\llvm-mingw-*-ucrt-aarch64").path + echo $installPath\bin | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append + - uses: actions/checkout@v4 + - uses: actions/setup-go@v5 + with: + go-version-file: go.mod + - run: | + go build -o dist/${{ matrix.os }}-${{ matrix.arch }}/ . + - if: matrix.arch == 'arm64' + run: | + Invoke-WebRequest -Uri "https://aka.ms/vs/17/release/vc_redist.arm64.exe" -OutFile "dist\windows-arm64\vc_redist.arm64.exe" + - run: | + $env:VERSION='${{ github.ref_name }}' -Replace "v(.*)", '$1' + & .\scripts\build_windows.ps1 buildApp + env: + VCToolsRedistDir: stub + - uses: actions/upload-artifact@v4 + with: + name: build-${{ matrix.os }}-${{ matrix.arch }} + path: | + dist\${{ matrix.os }}-${{ matrix.arch }}\*.exe + dist\${{ matrix.os }}-${{ matrix.arch }}-app.exe + + windows-sign: + runs-on: windows-2022 + environment: release + needs: [windows-depends, windows-build] + steps: + - uses: actions/checkout@v4 + - uses: google-github-actions/auth@v2 + with: + project_id: ollama + credentials_json: ${{ secrets.GOOGLE_SIGNING_CREDENTIALS }} + - run: | + $ErrorActionPreference = "Stop" + Invoke-WebRequest -Uri "https://go.microsoft.com/fwlink/p/?LinkId=323507" -OutFile "${{ runner.temp }}\sdksetup.exe" + Start-Process "${{ runner.temp }}\sdksetup.exe" -ArgumentList @("/q") -NoNewWindow -Wait + + Invoke-WebRequest -Uri "https://github.com/GoogleCloudPlatform/kms-integrations/releases/download/cng-v1.0/kmscng-1.0-windows-amd64.zip" -OutFile "${{ runner.temp }}\plugin.zip" + Expand-Archive -Path "${{ runner.temp }}\plugin.zip" -DestinationPath "${{ runner.temp }}\plugin\" + & "${{ runner.temp }}\plugin\*\kmscng.msi" /quiet + + echo "${{ vars.OLLAMA_CERT }}" >ollama_inc.crt + - uses: actions/download-artifact@v4 + with: + pattern: build-windows-* + path: dist\ + merge-multiple: true + - uses: actions/download-artifact@v4 + with: + pattern: depends-windows-amd64-* + path: dist\windows-amd64\ + merge-multiple: true + - run: | + & .\scripts\build_windows.ps1 gatherDependencies sign buildInstaller distZip + env: + KEY_CONTAINER: ${{ vars.KEY_CONTAINER }} + - uses: actions/upload-artifact@v4 + with: + name: dist-windows + path: | + dist\OllamaSetup.exe + dist\ollama-windows-*.zip + + linux-build: + strategy: + matrix: + include: + - os: linux + arch: amd64 + target: archive + - os: linux + arch: amd64 + target: rocm + - os: linux + arch: arm64 + target: archive + runs-on: ${{ matrix.arch == 'arm64' && format('{0}-{1}', matrix.os, matrix.arch) || matrix.os }} + environment: release + needs: setup-environment + env: + GOFLAGS: ${{ needs.setup-environment.outputs.GOFLAGS }} + steps: + - uses: actions/checkout@v4 + - uses: docker/setup-buildx-action@v3 + - uses: docker/build-push-action@v6 + with: + context: . 
+ platforms: ${{ matrix.os }}/${{ matrix.arch }} + target: ${{ matrix.target }} + build-args: | + GOFLAGS=${{ env.GOFLAGS }} + CGO_CFLAGS=${{ env.CGO_CFLAGS }} + CGO_CXXFLAGS=${{ env.CGO_CXXFLAGS }} + outputs: type=local,dest=dist/${{ matrix.os }}-${{ matrix.arch }} + cache-from: type=registry,ref=ollama/ollama:latest + cache-to: type=inline + - run: | + for COMPONENT in bin/* lib/ollama/*; do + case "$COMPONENT" in + bin/ollama) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}.tar.in ;; + lib/ollama/*.so) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}.tar.in ;; + lib/ollama/cuda_v11) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}.tar.in ;; + lib/ollama/cuda_v12) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}.tar.in ;; + lib/ollama/cuda_jetpack5) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}-jetpack5.tar.in ;; + lib/ollama/cuda_jetpack6) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}-jetpack6.tar.in ;; + lib/ollama/rocm) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}-rocm.tar.in ;; + esac + done + working-directory: dist/${{ matrix.os }}-${{ matrix.arch }} + - run: | + for ARCHIVE in dist/${{ matrix.os }}-${{ matrix.arch }}/*.tar.in; do + tar c -C dist/${{ matrix.os }}-${{ matrix.arch }} -T $ARCHIVE --owner 0 --group 0 | pigz -9vc >$(basename ${ARCHIVE//.*/}.tgz); + done + - uses: actions/upload-artifact@v4 + with: + name: dist-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.target }} + path: | + *.tgz + + # Build each Docker variant (OS, arch, and flavor) separately. Using QEMU is unreliable and slower. + docker-build-push: + strategy: + matrix: + include: + - os: linux + arch: arm64 + build-args: | + CGO_CFLAGS + CGO_CXXFLAGS + GOFLAGS + - os: linux + arch: amd64 + build-args: | + CGO_CFLAGS + CGO_CXXFLAGS + GOFLAGS + - os: linux + arch: amd64 + suffix: '-rocm' + build-args: | + CGO_CFLAGS + CGO_CXXFLAGS + GOFLAGS + FLAVOR=rocm + runs-on: ${{ matrix.arch == 'arm64' && format('{0}-{1}', matrix.os, matrix.arch) || matrix.os }} + environment: release + needs: setup-environment + env: + GOFLAGS: ${{ needs.setup-environment.outputs.GOFLAGS }} + steps: + - uses: actions/checkout@v4 + - uses: docker/setup-buildx-action@v3 + - uses: docker/login-action@v3 + with: + username: ${{ vars.DOCKER_USER }} + password: ${{ secrets.DOCKER_ACCESS_TOKEN }} + - id: build-push + uses: docker/build-push-action@v6 + with: + context: . 
+ platforms: ${{ matrix.os }}/${{ matrix.arch }} + build-args: ${{ matrix.build-args }} + outputs: type=image,name=ollama/ollama,push-by-digest=true,name-canonical=true,push=true + cache-from: type=registry,ref=ollama/ollama:latest + cache-to: type=inline + - run: | + mkdir -p ${{ matrix.os }}-${{ matrix.arch }} + echo "${{ steps.build-push.outputs.digest }}" >${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.suffix }}.txt + working-directory: ${{ runner.temp }} + - uses: actions/upload-artifact@v4 + with: + name: digest-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.suffix }} + path: | + ${{ runner.temp }}/${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.suffix }}.txt + + # Merge Docker images for the same flavor into a single multi-arch manifest + docker-merge-push: + strategy: + matrix: + suffix: ['', '-rocm'] + runs-on: linux + environment: release + needs: [docker-build-push] + steps: + - uses: docker/login-action@v3 + with: + username: ${{ vars.DOCKER_USER }} + password: ${{ secrets.DOCKER_ACCESS_TOKEN }} + - id: metadata + uses: docker/metadata-action@v4 + with: + flavor: | + latest=false + suffix=${{ matrix.suffix }} + images: | + ollama/ollama + tags: | + type=ref,enable=true,priority=600,prefix=pr-,event=pr + type=semver,pattern={{version}} + - uses: actions/download-artifact@v4 + with: + pattern: digest-* + path: ${{ runner.temp }} + merge-multiple: true + - run: | + docker buildx imagetools create $(echo '${{ steps.metadata.outputs.json }}' | jq -cr '.tags | map("-t", .) | join(" ")') $(cat *-${{ matrix.suffix }}.txt | xargs printf 'ollama/ollama@%s ') + docker buildx imagetools inspect ollama/ollama:${{ steps.metadata.outputs.version }} + working-directory: ${{ runner.temp }} + + # Trigger downstream release process + trigger: + runs-on: ubuntu-latest + environment: release + needs: [darwin-build, windows-build, windows-depends] + steps: + - name: Trigger downstream release process + run: | + curl -L \ + -X POST \ + -H "Accept: application/vnd.github+json" \ + -H "Authorization: Bearer ${{ secrets.RELEASE_TOKEN }}" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + https://api.github.com/repos/ollama/${{ vars.RELEASE_REPO }}/dispatches \ + -d "{\"event_type\": \"trigger-workflow\", \"client_payload\": {\"run_id\": \"${GITHUB_RUN_ID}\", \"version\": \"${GITHUB_REF_NAME#v}\"}}" + + # Aggregate all the assets and ship a release + release: + needs: [darwin-sign, windows-sign, linux-build] + runs-on: linux + environment: release + permissions: + contents: write + env: + GH_TOKEN: ${{ github.token }} + steps: + - uses: actions/checkout@v4 + - uses: actions/download-artifact@v4 + with: + name: dist-darwin + path: dist + - uses: actions/download-artifact@v4 + with: + name: dist-windows + path: dist + - uses: actions/download-artifact@v4 + with: + pattern: dist-linux-* + path: dist + merge-multiple: true + - run: find . 
-type f -not -name 'sha256sum.txt' | xargs sha256sum | tee sha256sum.txt + working-directory: dist + - name: Create or update Release + run: | + RELEASE_VERSION="$(echo ${GITHUB_REF_NAME} | cut -f1 -d-)" + + echo "Looking for existing release for ${RELEASE_VERSION}" + OLD_TAG=$(gh release ls --json name,tagName | jq -r ".[] | select(.name == \"${RELEASE_VERSION}\") | .tagName") + if [ -n "$OLD_TAG" ]; then + echo "Updating release ${RELEASE_VERSION} to point to new tag ${GITHUB_REF_NAME}" + gh release edit ${OLD_TAG} --tag ${GITHUB_REF_NAME} + else + echo "Creating new release ${RELEASE_VERSION} pointing to tag ${GITHUB_REF_NAME}" + gh release create ${GITHUB_REF_NAME} \ + --title ${RELEASE_VERSION} \ + --draft \ + --generate-notes \ + --prerelease + fi + echo "Uploading artifacts for tag ${GITHUB_REF_NAME}" + gh release upload ${GITHUB_REF_NAME} dist/* --clobber diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml new file mode 100644 index 0000000..27e229f --- /dev/null +++ b/.github/workflows/test.yaml @@ -0,0 +1,241 @@ +name: test + +concurrency: + # For PRs, later CI runs preempt previous ones. e.g. a force push on a PR + # cancels running CI jobs and starts all new ones. + # + # For non-PR pushes, concurrency.group needs to be unique for every distinct + # CI run we want to have happen. Use run_id, which in practice means all + # non-PR CI runs will be allowed to run without preempting each other. + group: ${{ github.workflow }}-$${{ github.pull_request.number || github.run_id }} + cancel-in-progress: true + +on: + pull_request: + paths: + - '**/*' + - '!docs/**' + - '!README.md' + +jobs: + changes: + runs-on: ubuntu-latest + outputs: + changed: ${{ steps.changes.outputs.changed }} + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - id: changes + run: | + changed() { + local BASE=${{ github.event.pull_request.base.sha }} + local HEAD=${{ github.event.pull_request.head.sha }} + local MERGE_BASE=$(git merge-base $BASE $HEAD) + git diff-tree -r --no-commit-id --name-only "$MERGE_BASE" "$HEAD" \ + | xargs python3 -c "import sys; from pathlib import Path; print(any(Path(x).match(glob) for x in sys.argv[1:] for glob in '$*'.split(' ')))" + } + + echo changed=$(changed 'llama/llama.cpp/**' 'ml/backend/ggml/ggml/**') | tee -a $GITHUB_OUTPUT + + linux: + needs: [changes] + if: needs.changes.outputs.changed == 'True' + strategy: + matrix: + include: + - preset: CPU + - preset: CUDA + container: nvidia/cuda:11.8.0-devel-ubuntu22.04 + flags: '-DCMAKE_CUDA_ARCHITECTURES=87' + - preset: ROCm + container: rocm/dev-ubuntu-22.04:6.1.2 + extra-packages: rocm-libs + flags: '-DAMDGPU_TARGETS=gfx1010 -DCMAKE_PREFIX_PATH=/opt/rocm' + runs-on: linux + container: ${{ matrix.container }} + steps: + - uses: actions/checkout@v4 + - run: | + [ -n "${{ matrix.container }}" ] || sudo=sudo + $sudo apt-get update + $sudo apt-get install -y cmake ccache ${{ matrix.extra-packages }} + env: + DEBIAN_FRONTEND: noninteractive + - uses: actions/cache@v4 + with: + path: /github/home/.cache/ccache + key: ccache-${{ runner.os }}-${{ runner.arch }}-${{ matrix.preset }} + - run: | + cmake --preset ${{ matrix.preset }} ${{ matrix.flags }} + cmake --build --preset ${{ matrix.preset }} --parallel + + windows: + needs: [changes] + if: needs.changes.outputs.changed == 'True' + strategy: + matrix: + include: + - preset: CPU + - preset: CUDA + install: https://developer.download.nvidia.com/compute/cuda/11.3.1/local_installers/cuda_11.3.1_465.89_win10.exe + flags: '-DCMAKE_CUDA_ARCHITECTURES=80' + - 
preset: ROCm + install: https://download.amd.com/developer/eula/rocm-hub/AMD-Software-PRO-Edition-24.Q4-WinSvr2022-For-HIP.exe + flags: '-DAMDGPU_TARGETS=gfx1010' + runs-on: windows + steps: + - run: | + choco install -y --no-progress ccache ninja + ccache -o cache_dir=${{ github.workspace }}\.ccache + - if: matrix.preset == 'CUDA' || matrix.preset == 'ROCm' + id: cache-install + uses: actions/cache/restore@v4 + with: + path: | + C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA + C:\Program Files\AMD\ROCm + key: ${{ matrix.install }} + - if: matrix.preset == 'CUDA' + name: Install CUDA ${{ matrix.cuda-version }} + run: | + $ErrorActionPreference = "Stop" + if ("${{ steps.cache-install.outputs.cache-hit }}" -ne 'true') { + Invoke-WebRequest -Uri "${{ matrix.install }}" -OutFile "install.exe" + Start-Process -FilePath .\install.exe -ArgumentList (@("-s", "cudart_11.3", "nvcc_11.3", "cublas_11.3", "cublas_dev_11.3")) -NoNewWindow -Wait + } + + $cudaPath = (Resolve-Path "C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\*").path + echo "$cudaPath\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append + - if: matrix.preset == 'ROCm' + name: Install ROCm ${{ matrix.rocm-version }} + run: | + $ErrorActionPreference = "Stop" + if ("${{ steps.cache-install.outputs.cache-hit }}" -ne 'true') { + Invoke-WebRequest -Uri "${{ matrix.install }}" -OutFile "install.exe" + Start-Process -FilePath .\install.exe -ArgumentList '-install' -NoNewWindow -Wait + } + + $hipPath = (Resolve-Path "C:\Program Files\AMD\ROCm\*").path + echo "$hipPath\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append + echo "CC=$hipPath\bin\clang.exe" | Out-File -FilePath $env:GITHUB_ENV -Append + echo "CXX=$hipPath\bin\clang++.exe" | Out-File -FilePath $env:GITHUB_ENV -Append + - if: ${{ !cancelled() && steps.cache-install.outputs.cache-hit != 'true' }} + uses: actions/cache/save@v4 + with: + path: | + C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA + C:\Program Files\AMD\ROCm + key: ${{ matrix.install }} + - uses: actions/checkout@v4 + - uses: actions/cache@v4 + with: + path: ${{ github.workspace }}\.ccache + key: ccache-${{ runner.os }}-${{ runner.arch }}-${{ matrix.preset }} + - run: | + Import-Module 'C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\Common7\Tools\Microsoft.VisualStudio.DevShell.dll' + Enter-VsDevShell -VsInstallPath 'C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise' -SkipAutomaticLocation -DevCmdArguments '-arch=x64 -no_logo' + cmake --preset "${{ matrix.preset }}" ${{ matrix.flags }} + cmake --build --parallel --preset "${{ matrix.preset }}" + env: + CMAKE_GENERATOR: Ninja + + go_mod_tidy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: check that 'go mod tidy' is clean + run: go mod tidy --diff || (echo "Please run 'go mod tidy'." 
&& exit 1) + + test: + strategy: + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + runs-on: ${{ matrix.os }} + env: + CGO_ENABLED: '1' + GOEXPERIMENT: 'synctest' + steps: + - name: checkout + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # 4.2.2 + + - name: cache restore + uses: actions/cache/restore@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4.2.0 + with: + # Note: unlike the other setups, this is only grabbing the mod download + # cache, rather than the whole mod directory, as the download cache + # contains zips that can be unpacked in parallel faster than they can be + # fetched and extracted by tar + path: | + ~/.cache/go-build + ~/go/pkg/mod/cache + ~\AppData\Local\go-build + # NOTE: The -3- here should be incremented when the scheme of data to be + # cached changes (e.g. path above changes). + key: ${{ github.job }}-${{ runner.os }}-${{ matrix.goarch }}-${{ matrix.buildflags }}-go-3-${{ hashFiles('**/go.sum') }}-${{ github.run_id }} + restore-keys: | + ${{ github.job }}-${{ runner.os }}-${{ matrix.goarch }}-${{ matrix.buildflags }}-go-3-${{ hashFiles('**/go.sum') }} + ${{ github.job }}-${{ runner.os }}-${{ matrix.goarch }}-${{ matrix.buildflags }}-go-3- + + - name: Setup Go + uses: actions/setup-go@v5 + with: + # The caching strategy of setup-go is less than ideal, and wastes + # time by not saving artifacts due to small failures like the linter + # complaining, etc. This means subsequent have to rebuild their world + # again until all checks pass. For instance, if you mispell a word, + # you're punished until you fix it. This is more hostile than + # helpful. + cache: false + + go-version-file: go.mod + + # It is tempting to run this in a platform independent way, but the past + # shows this codebase will see introductions of platform specific code + # generation, and so we need to check this per platform to ensure we + # don't abuse go generate on specific platforms. + - name: check that 'go generate' is clean + if: always() + run: | + go generate ./... + git diff --name-only --exit-code || (echo "Please run 'go generate ./...'." && exit 1) + + - name: go test + if: always() + run: go test -count=1 -benchtime=1x ./... + + # TODO(bmizerany): replace this heavy tool with just the + # tools/checks/binaries we want and then make them all run in parallel + # across jobs, not on a single tiny vm on Github Actions. + - uses: golangci/golangci-lint-action@v6 + with: + args: --timeout 10m0s -v + + - name: cache save + # Always save the cache, even if the job fails. The artifacts produced + # during the building of test binaries are not all for naught. They can + # be used to speed up subsequent runs. + if: always() + + uses: actions/cache/save@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4.2.0 + with: + # Note: unlike the other setups, this is only grabbing the mod download + # cache, rather than the whole mod directory, as the download cache + # contains zips that can be unpacked in parallel faster than they can be + # fetched and extracted by tar + path: | + ~/.cache/go-build + ~/go/pkg/mod/cache + ~\AppData\Local\go-build + # NOTE: The -3- here should be incremented when the scheme of data to be + # cached changes (e.g. path above changes). 
+ key: ${{ github.job }}-${{ runner.os }}-${{ matrix.goarch }}-${{ matrix.buildflags }}-go-3-${{ hashFiles('**/go.sum') }}-${{ github.run_id }} + + patches: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Verify patches apply cleanly and do not change files + run: | + make -f Makefile.sync clean checkout apply-patches sync + git diff --compact-summary --exit-code \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..3a2af0b --- /dev/null +++ b/.gitignore @@ -0,0 +1,16 @@ +.DS_Store +.vscode +.env +.venv +.swp +dist +build +.cache +*.exe +.idea +test_data +*.crt +__debug_bin* +llama/build +llama/vendor +/ollama diff --git a/.golangci.yaml b/.golangci.yaml new file mode 100644 index 0000000..9d6705b --- /dev/null +++ b/.golangci.yaml @@ -0,0 +1,41 @@ +run: + timeout: 5m +linters: + enable: + - asasalint + - bidichk + - bodyclose + - containedctx + - gocheckcompilerdirectives + - gofmt + - gofumpt + - gosimple + - govet + - ineffassign + - intrange + - makezero + - misspell + - nilerr + - nolintlint + - nosprintfhostport + - staticcheck + - unconvert + - usetesting + - wastedassign + - whitespace + disable: + - usestdlibvars + - errcheck +linters-settings: + staticcheck: + checks: + - all + - -SA1019 # omit Deprecated check +severity: + default-severity: error + rules: + - linters: + - gofmt + - goimports + - intrange + severity: info diff --git a/CMakeLists.txt b/CMakeLists.txt new file mode 100644 index 0000000..5343d87 --- /dev/null +++ b/CMakeLists.txt @@ -0,0 +1,133 @@ +cmake_minimum_required(VERSION 3.21) + +project(Ollama C CXX) + +include(CheckLanguage) + +find_package(Threads REQUIRED) + +set(CMAKE_BUILD_TYPE Release) +set(BUILD_SHARED_LIBS ON) + +set(CMAKE_CXX_STANDARD 17) +set(CMAKE_CXX_STANDARD_REQUIRED ON) +set(CMAKE_CXX_EXTENSIONS OFF) + +set(GGML_BUILD ON) +set(GGML_SHARED ON) +set(GGML_CCACHE ON) +set(GGML_BACKEND_DL ON) +set(GGML_BACKEND_SHARED ON) +set(GGML_SCHED_MAX_COPIES 4) + +set(GGML_LLAMAFILE ON) +set(GGML_CUDA_PEER_MAX_BATCH_SIZE 128) +set(GGML_CUDA_GRAPHS ON) +set(GGML_CUDA_FA ON) +set(GGML_CUDA_COMPRESSION_MODE default) + +if((CMAKE_OSX_ARCHITECTURES AND NOT CMAKE_OSX_ARCHITECTURES MATCHES "arm64") + OR (NOT CMAKE_OSX_ARCHITECTURES AND NOT CMAKE_SYSTEM_PROCESSOR MATCHES "arm|aarch64|ARM64|ARMv[0-9]+")) + set(GGML_CPU_ALL_VARIANTS ON) +endif() + +if (CMAKE_OSX_ARCHITECTURES MATCHES "x86_64") + set(CMAKE_BUILD_RPATH "@loader_path") + set(CMAKE_INSTALL_RPATH "@loader_path") +endif() + +set(OLLAMA_BUILD_DIR ${CMAKE_BINARY_DIR}/lib/ollama) +set(OLLAMA_INSTALL_DIR ${CMAKE_INSTALL_PREFIX}/lib/ollama) + +set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${OLLAMA_BUILD_DIR}) +set(CMAKE_RUNTIME_OUTPUT_DIRECTORY_DEBUG ${OLLAMA_BUILD_DIR}) +set(CMAKE_RUNTIME_OUTPUT_DIRECTORY_RELEASE ${OLLAMA_BUILD_DIR}) +set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${OLLAMA_BUILD_DIR}) +set(CMAKE_LIBRARY_OUTPUT_DIRECTORY_DEBUG ${OLLAMA_BUILD_DIR}) +set(CMAKE_LIBRARY_OUTPUT_DIRECTORY_RELEASE ${OLLAMA_BUILD_DIR}) + +include_directories(${CMAKE_CURRENT_SOURCE_DIR}/ml/backend/ggml/ggml/src) +include_directories(${CMAKE_CURRENT_SOURCE_DIR}/ml/backend/ggml/ggml/src/include) +include_directories(${CMAKE_CURRENT_SOURCE_DIR}/ml/backend/ggml/ggml/src/ggml-cpu) +include_directories(${CMAKE_CURRENT_SOURCE_DIR}/ml/backend/ggml/ggml/src/ggml-cpu/amx) + +set(GGML_CPU ON) +add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/ml/backend/ggml/ggml/src) +set_property(TARGET ggml PROPERTY EXCLUDE_FROM_ALL TRUE) + +get_target_property(CPU_VARIANTS ggml-cpu 
MANUALLY_ADDED_DEPENDENCIES) +if(NOT CPU_VARIANTS) + set(CPU_VARIANTS "ggml-cpu") +endif() + +install(TARGETS ggml-base ${CPU_VARIANTS} + RUNTIME_DEPENDENCIES + PRE_EXCLUDE_REGEXES ".*" + RUNTIME DESTINATION ${OLLAMA_INSTALL_DIR} COMPONENT CPU + LIBRARY DESTINATION ${OLLAMA_INSTALL_DIR} COMPONENT CPU + FRAMEWORK DESTINATION ${OLLAMA_INSTALL_DIR} COMPONENT CPU +) + +check_language(CUDA) +if(CMAKE_CUDA_COMPILER) + if(CMAKE_VERSION VERSION_GREATER_EQUAL "3.24" AND NOT CMAKE_CUDA_ARCHITECTURES) + set(CMAKE_CUDA_ARCHITECTURES "native") + endif() + + find_package(CUDAToolkit) + add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/ml/backend/ggml/ggml/src/ggml-cuda) + set(OLLAMA_CUDA_INSTALL_DIR ${OLLAMA_INSTALL_DIR}/cuda_v${CUDAToolkit_VERSION_MAJOR}) + install(TARGETS ggml-cuda + RUNTIME_DEPENDENCIES + DIRECTORIES ${CUDAToolkit_BIN_DIR} ${CUDAToolkit_LIBRARY_DIR} + PRE_INCLUDE_REGEXES cublas cublasLt cudart + PRE_EXCLUDE_REGEXES ".*" + RUNTIME DESTINATION ${OLLAMA_CUDA_INSTALL_DIR} COMPONENT CUDA + LIBRARY DESTINATION ${OLLAMA_CUDA_INSTALL_DIR} COMPONENT CUDA + ) +endif() + +set(WINDOWS_AMDGPU_TARGETS_EXCLUDE_REGEX "^gfx(906|908|90a|1200|1201):xnack[+-]$" + CACHE STRING + "Regular expression describing AMDGPU_TARGETS not supported on Windows. Override to force building these targets. Default \"^gfx(906|908|90a|1200|1201):xnack[+-]$\"." +) + +check_language(HIP) +if(CMAKE_HIP_COMPILER) + set(HIP_PLATFORM "amd") + + find_package(hip REQUIRED) + if(NOT AMDGPU_TARGETS) + list(FILTER AMDGPU_TARGETS INCLUDE REGEX "^gfx(900|94[012]|101[02]|1030|110[012]|120[01])$") + elseif(WIN32 AND WINDOWS_AMDGPU_TARGETS_EXCLUDE_REGEX) + list(FILTER AMDGPU_TARGETS EXCLUDE REGEX ${WINDOWS_AMDGPU_TARGETS_EXCLUDE_REGEX}) + endif() + + if(AMDGPU_TARGETS) + add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/ml/backend/ggml/ggml/src/ggml-hip) + + if (WIN32) + target_compile_definitions(ggml-hip PRIVATE GGML_CUDA_NO_PEER_COPY) + endif() + + target_compile_definitions(ggml-hip PRIVATE GGML_HIP_NO_VMM) + + set(OLLAMA_HIP_INSTALL_DIR ${OLLAMA_INSTALL_DIR}/rocm) + install(TARGETS ggml-hip + RUNTIME_DEPENDENCIES + DIRECTORIES ${HIP_BIN_INSTALL_DIR} ${HIP_LIB_INSTALL_DIR} + PRE_INCLUDE_REGEXES hipblas rocblas amdhip64 rocsolver amd_comgr hsa-runtime64 rocsparse tinfo rocprofiler-register drm drm_amdgpu numa elf + PRE_EXCLUDE_REGEXES ".*" + POST_EXCLUDE_REGEXES "system32" + RUNTIME DESTINATION ${OLLAMA_HIP_INSTALL_DIR} COMPONENT HIP + LIBRARY DESTINATION ${OLLAMA_HIP_INSTALL_DIR} COMPONENT HIP + ) + + foreach(HIP_LIB_BIN_INSTALL_DIR IN ITEMS ${HIP_BIN_INSTALL_DIR} ${HIP_LIB_INSTALL_DIR}) + if(EXISTS ${HIP_LIB_BIN_INSTALL_DIR}/rocblas) + install(DIRECTORY ${HIP_LIB_BIN_INSTALL_DIR}/rocblas DESTINATION ${OLLAMA_HIP_INSTALL_DIR} COMPONENT HIP) + break() + endif() + endforeach() + endif() +endif() diff --git a/CMakePresets.json b/CMakePresets.json new file mode 100644 index 0000000..0b70d8b --- /dev/null +++ b/CMakePresets.json @@ -0,0 +1,112 @@ +{ + "version": 3, + "configurePresets": [ + { + "name": "Default", + "binaryDir": "${sourceDir}/build", + "installDir": "${sourceDir}/dist", + "cacheVariables": { + "CMAKE_BUILD_TYPE": "Release" + } + }, + { + "name": "CPU", + "inherits": [ "Default" ] + }, + { + "name": "CUDA", + "inherits": [ "Default" ] + }, + { + "name": "CUDA 11", + "inherits": [ "CUDA" ], + "cacheVariables": { + "CMAKE_CUDA_ARCHITECTURES": "50;52;53;60;61;70;75;80;86", + "CMAKE_CUDA_FLAGS": "-Wno-deprecated-gpu-targets" + } + }, + { + "name": "CUDA 12", + "inherits": [ "CUDA" ], + "cacheVariables": { + "CMAKE_CUDA_ARCHITECTURES": 
"50;60;61;70;75;80;86;87;89;90;90a;120", + "CMAKE_CUDA_FLAGS": "-Wno-deprecated-gpu-targets" + } + }, + { + "name": "JetPack 5", + "inherits": [ "CUDA" ], + "cacheVariables": { + "CMAKE_CUDA_ARCHITECTURES": "72;87" + } + }, + { + "name": "JetPack 6", + "inherits": [ "CUDA" ], + "cacheVariables": { + "CMAKE_CUDA_ARCHITECTURES": "87" + } + }, + { + "name": "ROCm", + "inherits": [ "Default" ], + "cacheVariables": { + "CMAKE_HIP_PLATFORM": "amd" + } + }, + { + "name": "ROCm 6", + "inherits": [ "ROCm" ], + "cacheVariables": { + "AMDGPU_TARGETS": "gfx900;gfx940;gfx941;gfx942;gfx1010;gfx1012;gfx1030;gfx1100;gfx1101;gfx1102;gfx1151;gfx1200;gfx1201;gfx906:xnack-;gfx908:xnack-;gfx90a:xnack+;gfx90a:xnack-" + } + } + ], + "buildPresets": [ + { + "name": "Default", + "configurePreset": "Default", + "configuration": "Release" + }, + { + "name": "CPU", + "configurePreset": "Default", + "targets": [ "ggml-cpu" ] + }, + { + "name": "CUDA", + "configurePreset": "CUDA", + "targets": [ "ggml-cuda" ] + }, + { + "name": "CUDA 11", + "inherits": [ "CUDA" ], + "configurePreset": "CUDA 11" + }, + { + "name": "CUDA 12", + "inherits": [ "CUDA" ], + "configurePreset": "CUDA 12" + }, + { + "name": "JetPack 5", + "inherits": [ "CUDA" ], + "configurePreset": "JetPack 5" + }, + { + "name": "JetPack 6", + "inherits": [ "CUDA" ], + "configurePreset": "JetPack 6" + }, + { + "name": "ROCm", + "configurePreset": "ROCm", + "targets": [ "ggml-hip" ] + }, + { + "name": "ROCm 6", + "inherits": [ "ROCm" ], + "configurePreset": "ROCm 6" + } + ] +} diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..c7028e0 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,88 @@ +# Contributing to Ollama + +Thank you for your interest in contributing to Ollama! Here are a few guidelines to help get you started. + +## Set up + +See the [development documentation](./docs/development.md) for instructions on how to build and run Ollama locally. + +### Ideal issues + +* [Bugs](https://github.com/ollama/ollama/issues?q=is%3Aissue+is%3Aopen+label%3Abug): issues where Ollama stops working or where it results in an unexpected error. +* [Performance](https://github.com/ollama/ollama/issues?q=is%3Aissue+is%3Aopen+label%3Aperformance): issues to make Ollama faster at model inference, downloading or uploading. +* [Security](https://github.com/ollama/ollama/blob/main/SECURITY.md): issues that could lead to a security vulnerability. As mentioned in [SECURITY.md](https://github.com/ollama/ollama/blob/main/SECURITY.md), please do not disclose security vulnerabilities publicly. + +### Issues that are harder to review + +* New features: new features (e.g. API fields, environment variables) add surface area to Ollama and make it harder to maintain in the long run as they cannot be removed without potentially breaking users in the future. +* Refactoring: large code improvements are important, but can be harder or take longer to review and merge. +* Documentation: small updates to fill in or correct missing documentation is helpful, however large documentation additions can be hard to maintain over time. + +### Issues that may not be accepted + +* Changes that break backwards compatibility in Ollama's API (including the OpenAI-compatible API) +* Changes that add significant friction to the user experience +* Changes that create a large future maintenance burden for maintainers and contributors + +## Proposing a (non-trivial) change + +> By "non-trivial", we mean a change that is not a bug fix or small +> documentation update. 
If you are unsure, please ask us on our [Discord +> server](https://discord.gg/ollama). + +Before opening a non-trivial Pull Request, please open an issue to discuss the change and +get feedback from the maintainers. This helps us understand the context of the +change and how it fits into Ollama's roadmap and prevents us from duplicating +work or you from spending time on a change that we may not be able to accept. + +Tips for proposals: + +* Explain the problem you are trying to solve, not what you are trying to do. +* Explain why the change is important. +* Explain how the change will be used. +* Explain how the change will be tested. + +Additionally, for bonus points: Provide draft documentation you would expect to +see if the change were accepted. + +## Pull requests + +**Commit messages** + +The title should look like: + + : + +The package is the most affected Go package. If the change does not affect Go +code, then use the directory name instead. Changes to a single well-known +file in the root directory may use the file name. + +The short description should start with a lowercase letter and be a +continuation of the sentence: + + "This changes Ollama to..." + +Examples: + + llm/backend/mlx: support the llama architecture + CONTRIBUTING: provide clairity on good commit messages, and bad + +Bad Examples: + + feat: add more emoji + fix: was not using famous web framework + chore: generify code + +**Tests** + +Please include tests. Strive to test behavior, not implementation. + +**New dependencies** + +Dependencies should be added sparingly. If you are adding a new dependency, +please explain why it is necessary and what other ways you attempted that +did not work without it. + +## Need help? + +If you need help with anything, feel free to reach out to us on our [Discord server](https://discord.gg/ollama). diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..4c6619e --- /dev/null +++ b/Dockerfile @@ -0,0 +1,131 @@ +# vim: filetype=dockerfile + +ARG FLAVOR=${TARGETARCH} + +ARG ROCMVERSION=6.3.3 +ARG JETPACK5VERSION=r35.4.1 +ARG JETPACK6VERSION=r36.4.0 +ARG CMAKEVERSION=3.31.2 + +# CUDA v11 requires gcc v10. v10.3 has regressions, so the rockylinux 8.5 AppStream has the latest compatible version +FROM --platform=linux/amd64 rocm/dev-almalinux-8:${ROCMVERSION}-complete AS base-amd64 +RUN yum install -y yum-utils \ + && yum-config-manager --add-repo https://dl.rockylinux.org/vault/rocky/8.5/AppStream/\$basearch/os/ \ + && rpm --import https://dl.rockylinux.org/pub/rocky/RPM-GPG-KEY-Rocky-8 \ + && dnf install -y yum-utils ccache gcc-toolset-10-gcc-10.2.1-8.2.el8 gcc-toolset-10-gcc-c++-10.2.1-8.2.el8 gcc-toolset-10-binutils-2.35-11.el8 \ + && yum-config-manager --add-repo https://developer.download.nvidia.com/compute/cuda/repos/rhel8/x86_64/cuda-rhel8.repo +ENV PATH=/opt/rh/gcc-toolset-10/root/usr/bin:$PATH + +FROM --platform=linux/arm64 almalinux:8 AS base-arm64 +# install epel-release for ccache +RUN yum install -y yum-utils epel-release \ + && dnf install -y clang ccache \ + && yum-config-manager --add-repo https://developer.download.nvidia.com/compute/cuda/repos/rhel8/sbsa/cuda-rhel8.repo +ENV CC=clang CXX=clang++ + +FROM base-${TARGETARCH} AS base +ARG CMAKEVERSION +RUN curl -fsSL https://github.com/Kitware/CMake/releases/download/v${CMAKEVERSION}/cmake-${CMAKEVERSION}-linux-$(uname -m).tar.gz | tar xz -C /usr/local --strip-components 1 +COPY CMakeLists.txt CMakePresets.json . 
+COPY ml/backend/ggml/ggml ml/backend/ggml/ggml +ENV LDFLAGS=-s + +FROM base AS cpu +RUN dnf install -y gcc-toolset-11-gcc gcc-toolset-11-gcc-c++ +ENV PATH=/opt/rh/gcc-toolset-11/root/usr/bin:$PATH +RUN --mount=type=cache,target=/root/.ccache \ + cmake --preset 'CPU' \ + && cmake --build --parallel --preset 'CPU' \ + && cmake --install build --component CPU --strip --parallel 8 + +FROM base AS cuda-11 +ARG CUDA11VERSION=11.3 +RUN dnf install -y cuda-toolkit-${CUDA11VERSION//./-} +ENV PATH=/usr/local/cuda-11/bin:$PATH +RUN --mount=type=cache,target=/root/.ccache \ + cmake --preset 'CUDA 11' \ + && cmake --build --parallel --preset 'CUDA 11' \ + && cmake --install build --component CUDA --strip --parallel 8 + +FROM base AS cuda-12 +ARG CUDA12VERSION=12.8 +RUN dnf install -y cuda-toolkit-${CUDA12VERSION//./-} +ENV PATH=/usr/local/cuda-12/bin:$PATH +RUN --mount=type=cache,target=/root/.ccache \ + cmake --preset 'CUDA 12' \ + && cmake --build --parallel --preset 'CUDA 12' \ + && cmake --install build --component CUDA --strip --parallel 8 + +FROM base AS rocm-6 +ENV PATH=/opt/rocm/hcc/bin:/opt/rocm/hip/bin:/opt/rocm/bin:/opt/rocm/hcc/bin:$PATH +RUN --mount=type=cache,target=/root/.ccache \ + cmake --preset 'ROCm 6' \ + && cmake --build --parallel --preset 'ROCm 6' \ + && cmake --install build --component HIP --strip --parallel 8 + +FROM --platform=linux/arm64 nvcr.io/nvidia/l4t-jetpack:${JETPACK5VERSION} AS jetpack-5 +ARG CMAKEVERSION +RUN apt-get update && apt-get install -y curl ccache \ + && curl -fsSL https://github.com/Kitware/CMake/releases/download/v${CMAKEVERSION}/cmake-${CMAKEVERSION}-linux-$(uname -m).tar.gz | tar xz -C /usr/local --strip-components 1 +COPY CMakeLists.txt CMakePresets.json . +COPY ml/backend/ggml/ggml ml/backend/ggml/ggml +RUN --mount=type=cache,target=/root/.ccache \ + cmake --preset 'JetPack 5' \ + && cmake --build --parallel --preset 'JetPack 5' \ + && cmake --install build --component CUDA --strip --parallel 8 + +FROM --platform=linux/arm64 nvcr.io/nvidia/l4t-jetpack:${JETPACK6VERSION} AS jetpack-6 +ARG CMAKEVERSION +RUN apt-get update && apt-get install -y curl ccache \ + && curl -fsSL https://github.com/Kitware/CMake/releases/download/v${CMAKEVERSION}/cmake-${CMAKEVERSION}-linux-$(uname -m).tar.gz | tar xz -C /usr/local --strip-components 1 +COPY CMakeLists.txt CMakePresets.json . +COPY ml/backend/ggml/ggml ml/backend/ggml/ggml +RUN --mount=type=cache,target=/root/.ccache \ + cmake --preset 'JetPack 6' \ + && cmake --build --parallel --preset 'JetPack 6' \ + && cmake --install build --component CUDA --strip --parallel 8 + +FROM base AS build +WORKDIR /go/src/github.com/ollama/ollama +COPY go.mod go.sum . +RUN curl -fsSL https://golang.org/dl/go$(awk '/^go/ { print $2 }' go.mod).linux-$(case $(uname -m) in x86_64) echo amd64 ;; aarch64) echo arm64 ;; esac).tar.gz | tar xz -C /usr/local +ENV PATH=/usr/local/go/bin:$PATH +RUN go mod download +COPY . . +ARG GOFLAGS="'-ldflags=-w -s'" +ENV CGO_ENABLED=1 +RUN --mount=type=cache,target=/root/.cache/go-build \ + go build -trimpath -buildmode=pie -o /bin/ollama . 
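+
+# The per-flavor stages below gather the GPU backends built above into
+# lib/ollama subdirectories (cuda_v11, cuda_v12, cuda_jetpack5/6, rocm).
+# "archive" then layers the CPU build and the ollama binary on top of the
+# stage selected by FLAVOR, and the final Ubuntu image copies its /bin and
+# /lib/ollama contents from "archive".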
+ +FROM --platform=linux/amd64 scratch AS amd64 +COPY --from=cuda-11 dist/lib/ollama/cuda_v11 /lib/ollama/cuda_v11 +COPY --from=cuda-12 dist/lib/ollama/cuda_v12 /lib/ollama/cuda_v12 + +FROM --platform=linux/arm64 scratch AS arm64 +COPY --from=cuda-11 dist/lib/ollama/cuda_v11 /lib/ollama/cuda_v11 +COPY --from=cuda-12 dist/lib/ollama/cuda_v12 /lib/ollama/cuda_v12 +COPY --from=jetpack-5 dist/lib/ollama/cuda_v11 /lib/ollama/cuda_jetpack5 +COPY --from=jetpack-6 dist/lib/ollama/cuda_v12 /lib/ollama/cuda_jetpack6 + +FROM scratch AS rocm +COPY --from=rocm-6 dist/lib/ollama/rocm /lib/ollama/rocm + +FROM ${FLAVOR} AS archive +COPY --from=cpu dist/lib/ollama /lib/ollama +COPY --from=build /bin/ollama /bin/ollama + +FROM ubuntu:20.04 +RUN apt-get update \ + && apt-get install -y ca-certificates \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* +COPY --from=archive /bin /usr/bin +ENV PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin +COPY --from=archive /lib/ollama /usr/lib/ollama +ENV LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64 +ENV NVIDIA_DRIVER_CAPABILITIES=compute,utility +ENV NVIDIA_VISIBLE_DEVICES=all +ENV OLLAMA_HOST=0.0.0.0:11434 +EXPOSE 11434 +ENTRYPOINT ["/bin/ollama"] +CMD ["serve"] diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..8e3dc97 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) Ollama + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/Makefile.sync b/Makefile.sync new file mode 100644 index 0000000..711667c --- /dev/null +++ b/Makefile.sync @@ -0,0 +1,63 @@ +UPSTREAM=https://github.com/ggerganov/llama.cpp.git +WORKDIR=llama/vendor +FETCH_HEAD=de4c07f93783a1a96456a44dc16b9db538ee1618 + +.PHONY: help +help: + @echo "Available targets:" + @echo " sync Sync with upstream repositories" + @echo " checkout Checkout upstream repository" + @echo " apply-patches Apply patches to local repository" + @echo " format-patches Format patches from local repository" + @echo " clean Clean local repository" + @echo + @echo "Example:" + @echo " make -f $(lastword $(MAKEFILE_LIST)) clean sync" + +.PHONY: sync +sync: llama/build-info.cpp ml/backend/ggml/ggml/src/ggml-metal/ggml-metal-embed.metal + +llama/build-info.cpp: llama/build-info.cpp.in llama/llama.cpp + sed -e 's|@FETCH_HEAD@|$(FETCH_HEAD)|' <$< >$@ + +ml/backend/ggml/ggml/src/ggml-metal/ggml-metal-embed.metal: ml/backend/ggml/ggml + go generate ./$(@D) + +.PHONY: llama/llama.cpp +llama/llama.cpp: llama/vendor/ + rsync -arvzc -f "merge $@/.rsync-filter" $< $@ + +.PHONY: ml/backend/ggml/ggml +ml/backend/ggml/ggml: llama/vendor/ggml/ + rsync -arvzc -f "merge $@/.rsync-filter" $< $@ + +PATCHES=$(wildcard llama/patches/*.patch) +PATCHED=$(join $(dir $(PATCHES)), $(addsuffix ed, $(addprefix ., $(notdir $(PATCHES))))) + +.PHONY: apply-patches +.NOTPARALLEL: +apply-patches: $(PATCHED) + +llama/patches/.%.patched: llama/patches/%.patch + @if git -c user.name=nobody -c 'user.email=<>' -C $(WORKDIR) am -3 $(realpath $<); then touch $@; else git -C $(WORKDIR) am --abort; exit 1; fi + +.PHONY: checkout +checkout: $(WORKDIR) + git -C $(WORKDIR) fetch + git -C $(WORKDIR) checkout -f $(FETCH_HEAD) + +$(WORKDIR): + git clone $(UPSTREAM) $(WORKDIR) + +.PHONE: format-patches +format-patches: llama/patches + git -C $(WORKDIR) format-patch \ + --no-signature \ + --no-numbered \ + --zero-commit \ + -o $(realpath $<) \ + $(FETCH_HEAD) + +.PHONE: clean +clean: checkout + $(RM) llama/patches/.*.patched diff --git a/README.md b/README.md new file mode 100644 index 0000000..6a4815c --- /dev/null +++ b/README.md @@ -0,0 +1,600 @@ +
+[Ollama logo]
+ +# Ollama + +Get up and running with large language models. + +### macOS + +[Download](https://ollama.com/download/Ollama-darwin.zip) + +### Windows + +[Download](https://ollama.com/download/OllamaSetup.exe) + +### Linux + +```shell +curl -fsSL https://ollama.com/install.sh | sh +``` + +[Manual install instructions](https://github.com/ollama/ollama/blob/main/docs/linux.md) + +### Docker + +The official [Ollama Docker image](https://hub.docker.com/r/ollama/ollama) `ollama/ollama` is available on Docker Hub. + +### Libraries + +- [ollama-python](https://github.com/ollama/ollama-python) +- [ollama-js](https://github.com/ollama/ollama-js) + +### Community + +- [Discord](https://discord.gg/ollama) +- [Reddit](https://reddit.com/r/ollama) + +## Quickstart + +To run and chat with [Llama 3.2](https://ollama.com/library/llama3.2): + +```shell +ollama run llama3.2 +``` + +## Model library + +Ollama supports a list of models available on [ollama.com/library](https://ollama.com/library 'ollama model library') + +Here are some example models that can be downloaded: + +| Model | Parameters | Size | Download | +| ------------------ | ---------- | ----- | -------------------------------- | +| Gemma 3 | 1B | 815MB | `ollama run gemma3:1b` | +| Gemma 3 | 4B | 3.3GB | `ollama run gemma3` | +| Gemma 3 | 12B | 8.1GB | `ollama run gemma3:12b` | +| Gemma 3 | 27B | 17GB | `ollama run gemma3:27b` | +| QwQ | 32B | 20GB | `ollama run qwq` | +| DeepSeek-R1 | 7B | 4.7GB | `ollama run deepseek-r1` | +| DeepSeek-R1 | 671B | 404GB | `ollama run deepseek-r1:671b` | +| Llama 4 | 109B | 67GB | `ollama run llama4:scout` | +| Llama 4 | 400B | 245GB | `ollama run llama4:maverick` | +| Llama 3.3 | 70B | 43GB | `ollama run llama3.3` | +| Llama 3.2 | 3B | 2.0GB | `ollama run llama3.2` | +| Llama 3.2 | 1B | 1.3GB | `ollama run llama3.2:1b` | +| Llama 3.2 Vision | 11B | 7.9GB | `ollama run llama3.2-vision` | +| Llama 3.2 Vision | 90B | 55GB | `ollama run llama3.2-vision:90b` | +| Llama 3.1 | 8B | 4.7GB | `ollama run llama3.1` | +| Llama 3.1 | 405B | 231GB | `ollama run llama3.1:405b` | +| Phi 4 | 14B | 9.1GB | `ollama run phi4` | +| Phi 4 Mini | 3.8B | 2.5GB | `ollama run phi4-mini` | +| Mistral | 7B | 4.1GB | `ollama run mistral` | +| Moondream 2 | 1.4B | 829MB | `ollama run moondream` | +| Neural Chat | 7B | 4.1GB | `ollama run neural-chat` | +| Starling | 7B | 4.1GB | `ollama run starling-lm` | +| Code Llama | 7B | 3.8GB | `ollama run codellama` | +| Llama 2 Uncensored | 7B | 3.8GB | `ollama run llama2-uncensored` | +| LLaVA | 7B | 4.5GB | `ollama run llava` | +| Granite-3.3 | 8B | 4.9GB | `ollama run granite3.3` | + +> [!NOTE] +> You should have at least 8 GB of RAM available to run the 7B models, 16 GB to run the 13B models, and 32 GB to run the 33B models. + +## Customize a model + +### Import from GGUF + +Ollama supports importing GGUF models in the Modelfile: + +1. Create a file named `Modelfile`, with a `FROM` instruction with the local filepath to the model you want to import. + + ``` + FROM ./vicuna-33b.Q4_0.gguf + ``` + +2. Create the model in Ollama + + ```shell + ollama create example -f Modelfile + ``` + +3. Run the model + + ```shell + ollama run example + ``` + +### Import from Safetensors + +See the [guide](docs/import.md) on importing models for more information. + +### Customize a prompt + +Models from the Ollama library can be customized with a prompt. 
For example, to customize the `llama3.2` model: + +```shell +ollama pull llama3.2 +``` + +Create a `Modelfile`: + +``` +FROM llama3.2 + +# set the temperature to 1 [higher is more creative, lower is more coherent] +PARAMETER temperature 1 + +# set the system message +SYSTEM """ +You are Mario from Super Mario Bros. Answer as Mario, the assistant, only. +""" +``` + +Next, create and run the model: + +``` +ollama create mario -f ./Modelfile +ollama run mario +>>> hi +Hello! It's your friend Mario. +``` + +For more information on working with a Modelfile, see the [Modelfile](docs/modelfile.md) documentation. + +## CLI Reference + +### Create a model + +`ollama create` is used to create a model from a Modelfile. + +```shell +ollama create mymodel -f ./Modelfile +``` + +### Pull a model + +```shell +ollama pull llama3.2 +``` + +> This command can also be used to update a local model. Only the diff will be pulled. + +### Remove a model + +```shell +ollama rm llama3.2 +``` + +### Copy a model + +```shell +ollama cp llama3.2 my-model +``` + +### Multiline input + +For multiline input, you can wrap text with `"""`: + +``` +>>> """Hello, +... world! +... """ +I'm a basic program that prints the famous "Hello, world!" message to the console. +``` + +### Multimodal models + +``` +ollama run llava "What's in this image? /Users/jmorgan/Desktop/smile.png" +``` + +> **Output**: The image features a yellow smiley face, which is likely the central focus of the picture. + +### Pass the prompt as an argument + +```shell +ollama run llama3.2 "Summarize this file: $(cat README.md)" +``` + +> **Output**: Ollama is a lightweight, extensible framework for building and running language models on the local machine. It provides a simple API for creating, running, and managing models, as well as a library of pre-built models that can be easily used in a variety of applications. + +### Show model information + +```shell +ollama show llama3.2 +``` + +### List models on your computer + +```shell +ollama list +``` + +### List which models are currently loaded + +```shell +ollama ps +``` + +### Stop a model which is currently running + +```shell +ollama stop llama3.2 +``` + +### Start Ollama + +`ollama serve` is used when you want to start ollama without running the desktop application. + +## Building + +See the [developer guide](https://github.com/ollama/ollama/blob/main/docs/development.md) + +### Running local builds + +Next, start the server: + +```shell +./ollama serve +``` + +Finally, in a separate shell, run a model: + +```shell +./ollama run llama3.2 +``` + +## REST API + +Ollama has a REST API for running and managing models. + +### Generate a response + +```shell +curl http://localhost:11434/api/generate -d '{ + "model": "llama3.2", + "prompt":"Why is the sky blue?" +}' +``` + +### Chat with a model + +```shell +curl http://localhost:11434/api/chat -d '{ + "model": "llama3.2", + "messages": [ + { "role": "user", "content": "why is the sky blue?" } + ] +}' +``` + +See the [API documentation](./docs/api.md) for all endpoints. 
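+By default, responses from these endpoints are streamed back as newline-delimited JSON objects. As an illustrative sketch (not part of this repository), here is how the generate endpoint can be consumed from Go with only the standard library; the `response` and `done` fields follow the streaming format described in the API documentation:
+
+```go
+package main
+
+import (
+	"bufio"
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"net/http"
+)
+
+func main() {
+	// Same request body as the curl example above.
+	body, err := json.Marshal(map[string]string{
+		"model":  "llama3.2",
+		"prompt": "Why is the sky blue?",
+	})
+	if err != nil {
+		panic(err)
+	}
+
+	resp, err := http.Post("http://localhost:11434/api/generate", "application/json", bytes.NewReader(body))
+	if err != nil {
+		panic(err)
+	}
+	defer resp.Body.Close()
+
+	// Each line of the streamed response is a JSON object; print the text
+	// chunks until the final object reports done=true.
+	scanner := bufio.NewScanner(resp.Body)
+	for scanner.Scan() {
+		var chunk struct {
+			Response string `json:"response"`
+			Done     bool   `json:"done"`
+		}
+		if err := json.Unmarshal(scanner.Bytes(), &chunk); err != nil {
+			panic(err)
+		}
+		fmt.Print(chunk.Response)
+		if chunk.Done {
+			fmt.Println()
+			break
+		}
+	}
+}
+```
+
+To receive a single aggregated JSON object instead of a stream, set `"stream": false` in the request body (see the API documentation for details).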
+ +## Community Integrations + +### Web & Desktop + +- [Open WebUI](https://github.com/open-webui/open-webui) +- [SwiftChat (macOS with ReactNative)](https://github.com/aws-samples/swift-chat) +- [Enchanted (macOS native)](https://github.com/AugustDev/enchanted) +- [Hollama](https://github.com/fmaclen/hollama) +- [Lollms-Webui](https://github.com/ParisNeo/lollms-webui) +- [LibreChat](https://github.com/danny-avila/LibreChat) +- [Bionic GPT](https://github.com/bionic-gpt/bionic-gpt) +- [HTML UI](https://github.com/rtcfirefly/ollama-ui) +- [Saddle](https://github.com/jikkuatwork/saddle) +- [TagSpaces](https://www.tagspaces.org) (A platform for file-based apps, [utilizing Ollama](https://docs.tagspaces.org/ai/) for the generation of tags and descriptions) +- [Chatbot UI](https://github.com/ivanfioravanti/chatbot-ollama) +- [Chatbot UI v2](https://github.com/mckaywrigley/chatbot-ui) +- [Typescript UI](https://github.com/ollama-interface/Ollama-Gui?tab=readme-ov-file) +- [Minimalistic React UI for Ollama Models](https://github.com/richawo/minimal-llm-ui) +- [Ollamac](https://github.com/kevinhermawan/Ollamac) +- [big-AGI](https://github.com/enricoros/big-AGI) +- [Cheshire Cat assistant framework](https://github.com/cheshire-cat-ai/core) +- [Amica](https://github.com/semperai/amica) +- [chatd](https://github.com/BruceMacD/chatd) +- [Ollama-SwiftUI](https://github.com/kghandour/Ollama-SwiftUI) +- [Dify.AI](https://github.com/langgenius/dify) +- [MindMac](https://mindmac.app) +- [NextJS Web Interface for Ollama](https://github.com/jakobhoeg/nextjs-ollama-llm-ui) +- [Msty](https://msty.app) +- [Chatbox](https://github.com/Bin-Huang/Chatbox) +- [WinForm Ollama Copilot](https://github.com/tgraupmann/WinForm_Ollama_Copilot) +- [NextChat](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web) with [Get Started Doc](https://docs.nextchat.dev/models/ollama) +- [Alpaca WebUI](https://github.com/mmo80/alpaca-webui) +- [OllamaGUI](https://github.com/enoch1118/ollamaGUI) +- [OpenAOE](https://github.com/InternLM/OpenAOE) +- [Odin Runes](https://github.com/leonid20000/OdinRunes) +- [LLM-X](https://github.com/mrdjohnson/llm-x) (Progressive Web App) +- [AnythingLLM (Docker + MacOs/Windows/Linux native app)](https://github.com/Mintplex-Labs/anything-llm) +- [Ollama Basic Chat: Uses HyperDiv Reactive UI](https://github.com/rapidarchitect/ollama_basic_chat) +- [Ollama-chats RPG](https://github.com/drazdra/ollama-chats) +- [IntelliBar](https://intellibar.app/) (AI-powered assistant for macOS) +- [Jirapt](https://github.com/AliAhmedNada/jirapt) (Jira Integration to generate issues, tasks, epics) +- [ojira](https://github.com/AliAhmedNada/ojira) (Jira chrome plugin to easily generate descriptions for tasks) +- [QA-Pilot](https://github.com/reid41/QA-Pilot) (Interactive chat tool that can leverage Ollama models for rapid understanding and navigation of GitHub code repositories) +- [ChatOllama](https://github.com/sugarforever/chat-ollama) (Open Source Chatbot based on Ollama with Knowledge Bases) +- [CRAG Ollama Chat](https://github.com/Nagi-ovo/CRAG-Ollama-Chat) (Simple Web Search with Corrective RAG) +- [RAGFlow](https://github.com/infiniflow/ragflow) (Open-source Retrieval-Augmented Generation engine based on deep document understanding) +- [StreamDeploy](https://github.com/StreamDeploy-DevRel/streamdeploy-llm-app-scaffold) (LLM Application Scaffold) +- [chat](https://github.com/swuecho/chat) (chat web app for teams) +- [Lobe Chat](https://github.com/lobehub/lobe-chat) with [Integrating 
Doc](https://lobehub.com/docs/self-hosting/examples/ollama) +- [Ollama RAG Chatbot](https://github.com/datvodinh/rag-chatbot.git) (Local Chat with multiple PDFs using Ollama and RAG) +- [BrainSoup](https://www.nurgo-software.com/products/brainsoup) (Flexible native client with RAG & multi-agent automation) +- [macai](https://github.com/Renset/macai) (macOS client for Ollama, ChatGPT, and other compatible API back-ends) +- [RWKV-Runner](https://github.com/josStorer/RWKV-Runner) (RWKV offline LLM deployment tool, also usable as a client for ChatGPT and Ollama) +- [Ollama Grid Search](https://github.com/dezoito/ollama-grid-search) (app to evaluate and compare models) +- [Olpaka](https://github.com/Otacon/olpaka) (User-friendly Flutter Web App for Ollama) +- [Casibase](https://casibase.org) (An open source AI knowledge base and dialogue system combining the latest RAG, SSO, ollama support, and multiple large language models.) +- [OllamaSpring](https://github.com/CrazyNeil/OllamaSpring) (Ollama Client for macOS) +- [LLocal.in](https://github.com/kartikm7/llocal) (Easy to use Electron Desktop Client for Ollama) +- [Shinkai Desktop](https://github.com/dcSpark/shinkai-apps) (Two click install Local AI using Ollama + Files + RAG) +- [AiLama](https://github.com/zeyoyt/ailama) (A Discord User App that allows you to interact with Ollama anywhere in Discord) +- [Ollama with Google Mesop](https://github.com/rapidarchitect/ollama_mesop/) (Mesop Chat Client implementation with Ollama) +- [R2R](https://github.com/SciPhi-AI/R2R) (Open-source RAG engine) +- [Ollama-Kis](https://github.com/elearningshow/ollama-kis) (A simple easy-to-use GUI with sample custom LLM for Drivers Education) +- [OpenGPA](https://opengpa.org) (Open-source offline-first Enterprise Agentic Application) +- [Painting Droid](https://github.com/mateuszmigas/painting-droid) (Painting app with AI integrations) +- [Kerlig AI](https://www.kerlig.com/) (AI writing assistant for macOS) +- [AI Studio](https://github.com/MindWorkAI/AI-Studio) +- [Sidellama](https://github.com/gyopak/sidellama) (browser-based LLM client) +- [LLMStack](https://github.com/trypromptly/LLMStack) (No-code multi-agent framework to build LLM agents and workflows) +- [BoltAI for Mac](https://boltai.com) (AI Chat Client for Mac) +- [Harbor](https://github.com/av/harbor) (Containerized LLM Toolkit with Ollama as default backend) +- [PyGPT](https://github.com/szczyglis-dev/py-gpt) (AI desktop assistant for Linux, Windows, and Mac) +- [Alpaca](https://github.com/Jeffser/Alpaca) (An Ollama client application for Linux and macOS made with GTK4 and Adwaita) +- [AutoGPT](https://github.com/Significant-Gravitas/AutoGPT/blob/master/docs/content/platform/ollama.md) (AutoGPT Ollama integration) +- [Go-CREW](https://www.jonathanhecl.com/go-crew/) (Powerful Offline RAG in Golang) +- [PartCAD](https://github.com/openvmp/partcad/) (CAD model generation with OpenSCAD and CadQuery) +- [Ollama4j Web UI](https://github.com/ollama4j/ollama4j-web-ui) - Java-based Web UI for Ollama built with Vaadin, Spring Boot, and Ollama4j +- [PyOllaMx](https://github.com/kspviswa/pyOllaMx) - macOS application capable of chatting with both Ollama and Apple MLX models. 
+- [Cline](https://github.com/cline/cline) - Formerly known as Claude Dev is a VSCode extension for multi-file/whole-repo coding +- [Cherry Studio](https://github.com/kangfenmao/cherry-studio) (Desktop client with Ollama support) +- [ConfiChat](https://github.com/1runeberg/confichat) (Lightweight, standalone, multi-platform, and privacy-focused LLM chat interface with optional encryption) +- [Archyve](https://github.com/nickthecook/archyve) (RAG-enabling document library) +- [crewAI with Mesop](https://github.com/rapidarchitect/ollama-crew-mesop) (Mesop Web Interface to run crewAI with Ollama) +- [Tkinter-based client](https://github.com/chyok/ollama-gui) (Python tkinter-based Client for Ollama) +- [LLMChat](https://github.com/trendy-design/llmchat) (Privacy focused, 100% local, intuitive all-in-one chat interface) +- [Local Multimodal AI Chat](https://github.com/Leon-Sander/Local-Multimodal-AI-Chat) (Ollama-based LLM Chat with support for multiple features, including PDF RAG, voice chat, image-based interactions, and integration with OpenAI.) +- [ARGO](https://github.com/xark-argo/argo) (Locally download and run Ollama and Huggingface models with RAG on Mac/Windows/Linux) +- [OrionChat](https://github.com/EliasPereirah/OrionChat) - OrionChat is a web interface for chatting with different AI providers +- [G1](https://github.com/bklieger-groq/g1) (Prototype of using prompting strategies to improve the LLM's reasoning through o1-like reasoning chains.) +- [Web management](https://github.com/lemonit-eric-mao/ollama-web-management) (Web management page) +- [Promptery](https://github.com/promptery/promptery) (desktop client for Ollama.) +- [Ollama App](https://github.com/JHubi1/ollama-app) (Modern and easy-to-use multi-platform client for Ollama) +- [chat-ollama](https://github.com/annilq/chat-ollama) (a React Native client for Ollama) +- [SpaceLlama](https://github.com/tcsenpai/spacellama) (Firefox and Chrome extension to quickly summarize web pages with ollama in a sidebar) +- [YouLama](https://github.com/tcsenpai/youlama) (Webapp to quickly summarize any YouTube video, supporting Invidious as well) +- [DualMind](https://github.com/tcsenpai/dualmind) (Experimental app allowing two models to talk to each other in the terminal or in a web interface) +- [ollamarama-matrix](https://github.com/h1ddenpr0cess20/ollamarama-matrix) (Ollama chatbot for the Matrix chat protocol) +- [ollama-chat-app](https://github.com/anan1213095357/ollama-chat-app) (Flutter-based chat app) +- [Perfect Memory AI](https://www.perfectmemory.ai/) (Productivity AI assists personalized by what you have seen on your screen, heard, and said in the meetings) +- [Hexabot](https://github.com/hexastack/hexabot) (A conversational AI builder) +- [Reddit Rate](https://github.com/rapidarchitect/reddit_analyzer) (Search and Rate Reddit topics with a weighted summation) +- [OpenTalkGpt](https://github.com/adarshM84/OpenTalkGpt) (Chrome Extension to manage open-source models supported by Ollama, create custom models, and chat with models from a user-friendly UI) +- [VT](https://github.com/vinhnx/vt.ai) (A minimal multimodal AI chat app, with dynamic conversation routing. 
Supports local models via Ollama) +- [Nosia](https://github.com/nosia-ai/nosia) (Easy to install and use RAG platform based on Ollama) +- [Witsy](https://github.com/nbonamy/witsy) (An AI Desktop application available for Mac/Windows/Linux) +- [Abbey](https://github.com/US-Artificial-Intelligence/abbey) (A configurable AI interface server with notebooks, document storage, and YouTube support) +- [Minima](https://github.com/dmayboroda/minima) (RAG with on-premises or fully local workflow) +- [aidful-ollama-model-delete](https://github.com/AidfulAI/aidful-ollama-model-delete) (User interface for simplified model cleanup) +- [Perplexica](https://github.com/ItzCrazyKns/Perplexica) (An AI-powered search engine & an open-source alternative to Perplexity AI) +- [Ollama Chat WebUI for Docker ](https://github.com/oslook/ollama-webui) (Support for local docker deployment, lightweight ollama webui) +- [AI Toolkit for Visual Studio Code](https://aka.ms/ai-tooklit/ollama-docs) (Microsoft-official VSCode extension to chat, test, evaluate models with Ollama support, and use them in your AI applications.) +- [MinimalNextOllamaChat](https://github.com/anilkay/MinimalNextOllamaChat) (Minimal Web UI for Chat and Model Control) +- [Chipper](https://github.com/TilmanGriesel/chipper) AI interface for tinkerers (Ollama, Haystack RAG, Python) +- [ChibiChat](https://github.com/CosmicEventHorizon/ChibiChat) (Kotlin-based Android app to chat with Ollama and Koboldcpp API endpoints) +- [LocalLLM](https://github.com/qusaismael/localllm) (Minimal Web-App to run ollama models on it with a GUI) +- [Ollamazing](https://github.com/buiducnhat/ollamazing) (Web extension to run Ollama models) +- [OpenDeepResearcher-via-searxng](https://github.com/benhaotang/OpenDeepResearcher-via-searxng) (A Deep Research equivalent endpoint with Ollama support for running locally) +- [AntSK](https://github.com/AIDotNet/AntSK) (Out-of-the-box & Adaptable RAG Chatbot) +- [MaxKB](https://github.com/1Panel-dev/MaxKB/) (Ready-to-use & flexible RAG Chatbot) +- [yla](https://github.com/danielekp/yla) (Web interface to freely interact with your customized models) +- [LangBot](https://github.com/RockChinQ/LangBot) (LLM-based instant messaging bots platform, with Agents, RAG features, supports multiple platforms) +- [1Panel](https://github.com/1Panel-dev/1Panel/) (Web-based Linux Server Management Tool) +- [AstrBot](https://github.com/Soulter/AstrBot/) (User-friendly LLM-based multi-platform chatbot with a WebUI, supporting RAG, LLM agents, and plugins integration) +- [Reins](https://github.com/ibrahimcetin/reins) (Easily tweak parameters, customize system prompts per chat, and enhance your AI experiments with reasoning model support.) +- [Flufy](https://github.com/Aharon-Bensadoun/Flufy) (A beautiful chat interface for interacting with Ollama's API. Built with React, TypeScript, and Material-UI.) +- [Ellama](https://github.com/zeozeozeo/ellama) (Friendly native app to chat with an Ollama instance) +- [screenpipe](https://github.com/mediar-ai/screenpipe) Build agents powered by your screen history +- [Ollamb](https://github.com/hengkysteen/ollamb) (Simple yet rich in features, cross-platform built with Flutter and designed for Ollama. Try the [web demo](https://hengkysteen.github.io/demo/ollamb/).) 
+- [Writeopia](https://github.com/Writeopia/Writeopia) (Text editor with integration with Ollama) +- [AppFlowy](https://github.com/AppFlowy-IO/AppFlowy) (AI collaborative workspace with Ollama, cross-platform and self-hostable) +- [Lumina](https://github.com/cushydigit/lumina.git) (A lightweight, minimal React.js frontend for interacting with Ollama servers) +- [Tiny Notepad](https://pypi.org/project/tiny-notepad) (A lightweight, notepad-like interface to chat with ollama available on PyPI) + +### Cloud + +- [Google Cloud](https://cloud.google.com/run/docs/tutorials/gpu-gemma2-with-ollama) +- [Fly.io](https://fly.io/docs/python/do-more/add-ollama/) +- [Koyeb](https://www.koyeb.com/deploy/ollama) + +### Terminal + +- [oterm](https://github.com/ggozad/oterm) +- [Ellama Emacs client](https://github.com/s-kostyaev/ellama) +- [Emacs client](https://github.com/zweifisch/ollama) +- [neollama](https://github.com/paradoxical-dev/neollama) UI client for interacting with models from within Neovim +- [gen.nvim](https://github.com/David-Kunz/gen.nvim) +- [ollama.nvim](https://github.com/nomnivore/ollama.nvim) +- [ollero.nvim](https://github.com/marco-souza/ollero.nvim) +- [ollama-chat.nvim](https://github.com/gerazov/ollama-chat.nvim) +- [ogpt.nvim](https://github.com/huynle/ogpt.nvim) +- [gptel Emacs client](https://github.com/karthink/gptel) +- [Oatmeal](https://github.com/dustinblackman/oatmeal) +- [cmdh](https://github.com/pgibler/cmdh) +- [ooo](https://github.com/npahlfer/ooo) +- [shell-pilot](https://github.com/reid41/shell-pilot)(Interact with models via pure shell scripts on Linux or macOS) +- [tenere](https://github.com/pythops/tenere) +- [llm-ollama](https://github.com/taketwo/llm-ollama) for [Datasette's LLM CLI](https://llm.datasette.io/en/stable/). +- [typechat-cli](https://github.com/anaisbetts/typechat-cli) +- [ShellOracle](https://github.com/djcopley/ShellOracle) +- [tlm](https://github.com/yusufcanb/tlm) +- [podman-ollama](https://github.com/ericcurtin/podman-ollama) +- [gollama](https://github.com/sammcj/gollama) +- [ParLlama](https://github.com/paulrobello/parllama) +- [Ollama eBook Summary](https://github.com/cognitivetech/ollama-ebook-summary/) +- [Ollama Mixture of Experts (MOE) in 50 lines of code](https://github.com/rapidarchitect/ollama_moe) +- [vim-intelligence-bridge](https://github.com/pepo-ec/vim-intelligence-bridge) Simple interaction of "Ollama" with the Vim editor +- [x-cmd ollama](https://x-cmd.com/mod/ollama) +- [bb7](https://github.com/drunkwcodes/bb7) +- [SwollamaCLI](https://github.com/marcusziade/Swollama) bundled with the Swollama Swift package. [Demo](https://github.com/marcusziade/Swollama?tab=readme-ov-file#cli-usage) +- [aichat](https://github.com/sigoden/aichat) All-in-one LLM CLI tool featuring Shell Assistant, Chat-REPL, RAG, AI tools & agents, with access to OpenAI, Claude, Gemini, Ollama, Groq, and more. +- [PowershAI](https://github.com/rrg92/powershai) PowerShell module that brings AI to terminal on Windows, including support for Ollama +- [DeepShell](https://github.com/Abyss-c0re/deepshell) Your self-hosted AI assistant. Interactive Shell, Files and Folders analysis. +- [orbiton](https://github.com/xyproto/orbiton) Configuration-free text editor and IDE with support for tab completion with Ollama. +- [orca-cli](https://github.com/molbal/orca-cli) Ollama Registry CLI Application - Browse, pull, and download models from Ollama Registry in your terminal. 
+- [GGUF-to-Ollama](https://github.com/jonathanhecl/gguf-to-ollama) - Importing GGUF to Ollama made easy (multiplatform) + +### Apple Vision Pro + +- [SwiftChat](https://github.com/aws-samples/swift-chat) (Cross-platform AI chat app supporting Apple Vision Pro via "Designed for iPad") +- [Enchanted](https://github.com/AugustDev/enchanted) + +### Database + +- [pgai](https://github.com/timescale/pgai) - PostgreSQL as a vector database (Create and search embeddings from Ollama models using pgvector) + - [Get started guide](https://github.com/timescale/pgai/blob/main/docs/vectorizer-quick-start.md) +- [MindsDB](https://github.com/mindsdb/mindsdb/blob/staging/mindsdb/integrations/handlers/ollama_handler/README.md) (Connects Ollama models with nearly 200 data platforms and apps) +- [chromem-go](https://github.com/philippgille/chromem-go/blob/v0.5.0/embed_ollama.go) with [example](https://github.com/philippgille/chromem-go/tree/v0.5.0/examples/rag-wikipedia-ollama) +- [Kangaroo](https://github.com/dbkangaroo/kangaroo) (AI-powered SQL client and admin tool for popular databases) + +### Package managers + +- [Pacman](https://archlinux.org/packages/extra/x86_64/ollama/) +- [Gentoo](https://github.com/gentoo/guru/tree/master/app-misc/ollama) +- [Homebrew](https://formulae.brew.sh/formula/ollama) +- [Helm Chart](https://artifacthub.io/packages/helm/ollama-helm/ollama) +- [Guix channel](https://codeberg.org/tusharhero/ollama-guix) +- [Nix package](https://search.nixos.org/packages?show=ollama&from=0&size=50&sort=relevance&type=packages&query=ollama) +- [Flox](https://flox.dev/blog/ollama-part-one) + +### Libraries + +- [LangChain](https://python.langchain.com/docs/integrations/chat/ollama/) and [LangChain.js](https://js.langchain.com/docs/integrations/chat/ollama/) with [example](https://js.langchain.com/docs/tutorials/local_rag/) +- [Firebase Genkit](https://firebase.google.com/docs/genkit/plugins/ollama) +- [crewAI](https://github.com/crewAIInc/crewAI) +- [Yacana](https://remembersoftwares.github.io/yacana/) (User-friendly multi-agent framework for brainstorming and executing predetermined flows with built-in tool integration) +- [Spring AI](https://github.com/spring-projects/spring-ai) with [reference](https://docs.spring.io/spring-ai/reference/api/chat/ollama-chat.html) and [example](https://github.com/tzolov/ollama-tools) +- [LangChainGo](https://github.com/tmc/langchaingo/) with [example](https://github.com/tmc/langchaingo/tree/main/examples/ollama-completion-example) +- [LangChain4j](https://github.com/langchain4j/langchain4j) with [example](https://github.com/langchain4j/langchain4j-examples/tree/main/ollama-examples/src/main/java) +- [LangChainRust](https://github.com/Abraxas-365/langchain-rust) with [example](https://github.com/Abraxas-365/langchain-rust/blob/main/examples/llm_ollama.rs) +- [LangChain for .NET](https://github.com/tryAGI/LangChain) with [example](https://github.com/tryAGI/LangChain/blob/main/examples/LangChain.Samples.OpenAI/Program.cs) +- [LLPhant](https://github.com/theodo-group/LLPhant?tab=readme-ov-file#ollama) +- [LlamaIndex](https://docs.llamaindex.ai/en/stable/examples/llm/ollama/) and [LlamaIndexTS](https://ts.llamaindex.ai/modules/llms/available_llms/ollama) +- [LiteLLM](https://github.com/BerriAI/litellm) +- [OllamaFarm for Go](https://github.com/presbrey/ollamafarm) +- [OllamaSharp for .NET](https://github.com/awaescher/OllamaSharp) +- [Ollama for Ruby](https://github.com/gbaptista/ollama-ai) +- [Ollama-rs for Rust](https://github.com/pepperoni21/ollama-rs) +- 
[Ollama-hpp for C++](https://github.com/jmont-dev/ollama-hpp) +- [Ollama4j for Java](https://github.com/ollama4j/ollama4j) +- [ModelFusion Typescript Library](https://modelfusion.dev/integration/model-provider/ollama) +- [OllamaKit for Swift](https://github.com/kevinhermawan/OllamaKit) +- [Ollama for Dart](https://github.com/breitburg/dart-ollama) +- [Ollama for Laravel](https://github.com/cloudstudio/ollama-laravel) +- [LangChainDart](https://github.com/davidmigloz/langchain_dart) +- [Semantic Kernel - Python](https://github.com/microsoft/semantic-kernel/tree/main/python/semantic_kernel/connectors/ai/ollama) +- [Haystack](https://github.com/deepset-ai/haystack-integrations/blob/main/integrations/ollama.md) +- [Elixir LangChain](https://github.com/brainlid/langchain) +- [Ollama for R - rollama](https://github.com/JBGruber/rollama) +- [Ollama for R - ollama-r](https://github.com/hauselin/ollama-r) +- [Ollama-ex for Elixir](https://github.com/lebrunel/ollama-ex) +- [Ollama Connector for SAP ABAP](https://github.com/b-tocs/abap_btocs_ollama) +- [Testcontainers](https://testcontainers.com/modules/ollama/) +- [Portkey](https://portkey.ai/docs/welcome/integration-guides/ollama) +- [PromptingTools.jl](https://github.com/svilupp/PromptingTools.jl) with an [example](https://svilupp.github.io/PromptingTools.jl/dev/examples/working_with_ollama) +- [LlamaScript](https://github.com/Project-Llama/llamascript) +- [llm-axe](https://github.com/emirsahin1/llm-axe) (Python Toolkit for Building LLM Powered Apps) +- [Gollm](https://docs.gollm.co/examples/ollama-example) +- [Gollama for Golang](https://github.com/jonathanhecl/gollama) +- [Ollamaclient for Golang](https://github.com/xyproto/ollamaclient) +- [High-level function abstraction in Go](https://gitlab.com/tozd/go/fun) +- [Ollama PHP](https://github.com/ArdaGnsrn/ollama-php) +- [Agents-Flex for Java](https://github.com/agents-flex/agents-flex) with [example](https://github.com/agents-flex/agents-flex/tree/main/agents-flex-llm/agents-flex-llm-ollama/src/test/java/com/agentsflex/llm/ollama) +- [Parakeet](https://github.com/parakeet-nest/parakeet) is a GoLang library, made to simplify the development of small generative AI applications with Ollama. 
+- [Haverscript](https://github.com/andygill/haverscript) with [examples](https://github.com/andygill/haverscript/tree/main/examples) +- [Ollama for Swift](https://github.com/mattt/ollama-swift) +- [Swollama for Swift](https://github.com/marcusziade/Swollama) with [DocC](https://marcusziade.github.io/Swollama/documentation/swollama/) +- [GoLamify](https://github.com/prasad89/golamify) +- [Ollama for Haskell](https://github.com/tusharad/ollama-haskell) +- [multi-llm-ts](https://github.com/nbonamy/multi-llm-ts) (A Typescript/JavaScript library allowing access to different LLM in a unified API) +- [LlmTornado](https://github.com/lofcz/llmtornado) (C# library providing a unified interface for major FOSS & Commercial inference APIs) +- [Ollama for Zig](https://github.com/dravenk/ollama-zig) +- [Abso](https://github.com/lunary-ai/abso) (OpenAI-compatible TypeScript SDK for any LLM provider) +- [Nichey](https://github.com/goodreasonai/nichey) is a Python package for generating custom wikis for your research topic +- [Ollama for D](https://github.com/kassane/ollama-d) +- [OllamaPlusPlus](https://github.com/HardCodeDev777/OllamaPlusPlus) (Very simple C++ library for Ollama) + +### Mobile + +- [SwiftChat](https://github.com/aws-samples/swift-chat) (Lightning-fast Cross-platform AI chat app with native UI for Android, iOS, and iPad) +- [Enchanted](https://github.com/AugustDev/enchanted) +- [Maid](https://github.com/Mobile-Artificial-Intelligence/maid) +- [Ollama App](https://github.com/JHubi1/ollama-app) (Modern and easy-to-use multi-platform client for Ollama) +- [ConfiChat](https://github.com/1runeberg/confichat) (Lightweight, standalone, multi-platform, and privacy-focused LLM chat interface with optional encryption) +- [Ollama Android Chat](https://github.com/sunshine0523/OllamaServer) (No need for Termux, start the Ollama service with one click on an Android device) +- [Reins](https://github.com/ibrahimcetin/reins) (Easily tweak parameters, customize system prompts per chat, and enhance your AI experiments with reasoning model support.) 
+ +### Extensions & Plugins + +- [Raycast extension](https://github.com/MassimilianoPasquini97/raycast_ollama) +- [Discollama](https://github.com/mxyng/discollama) (Discord bot inside the Ollama discord channel) +- [Continue](https://github.com/continuedev/continue) +- [Vibe](https://github.com/thewh1teagle/vibe) (Transcribe and analyze meetings with Ollama) +- [Obsidian Ollama plugin](https://github.com/hinterdupfinger/obsidian-ollama) +- [Logseq Ollama plugin](https://github.com/omagdy7/ollama-logseq) +- [NotesOllama](https://github.com/andersrex/notesollama) (Apple Notes Ollama plugin) +- [Dagger Chatbot](https://github.com/samalba/dagger-chatbot) +- [Discord AI Bot](https://github.com/mekb-turtle/discord-ai-bot) +- [Ollama Telegram Bot](https://github.com/ruecat/ollama-telegram) +- [Hass Ollama Conversation](https://github.com/ej52/hass-ollama-conversation) +- [Rivet plugin](https://github.com/abrenneke/rivet-plugin-ollama) +- [Obsidian BMO Chatbot plugin](https://github.com/longy2k/obsidian-bmo-chatbot) +- [Cliobot](https://github.com/herval/cliobot) (Telegram bot with Ollama support) +- [Copilot for Obsidian plugin](https://github.com/logancyang/obsidian-copilot) +- [Obsidian Local GPT plugin](https://github.com/pfrankov/obsidian-local-gpt) +- [Open Interpreter](https://docs.openinterpreter.com/language-model-setup/local-models/ollama) +- [Llama Coder](https://github.com/ex3ndr/llama-coder) (Copilot alternative using Ollama) +- [Ollama Copilot](https://github.com/bernardo-bruning/ollama-copilot) (Proxy that allows you to use Ollama as a copilot like GitHub Copilot) +- [twinny](https://github.com/rjmacarthy/twinny) (Copilot and Copilot chat alternative using Ollama) +- [Wingman-AI](https://github.com/RussellCanfield/wingman-ai) (Copilot code and chat alternative using Ollama and Hugging Face) +- [Page Assist](https://github.com/n4ze3m/page-assist) (Chrome Extension) +- [Plasmoid Ollama Control](https://github.com/imoize/plasmoid-ollamacontrol) (KDE Plasma extension that allows you to quickly manage/control Ollama model) +- [AI Telegram Bot](https://github.com/tusharhero/aitelegrambot) (Telegram bot using Ollama in backend) +- [AI ST Completion](https://github.com/yaroslavyaroslav/OpenAI-sublime-text) (Sublime Text 4 AI assistant plugin with Ollama support) +- [Discord-Ollama Chat Bot](https://github.com/kevinthedang/discord-ollama) (Generalized TypeScript Discord Bot w/ Tuning Documentation) +- [ChatGPTBox: All in one browser extension](https://github.com/josStorer/chatGPTBox) with [Integrating Tutorial](https://github.com/josStorer/chatGPTBox/issues/616#issuecomment-1975186467) +- [Discord AI chat/moderation bot](https://github.com/rapmd73/Companion) Chat/moderation bot written in python. Uses Ollama to create personalities. +- [Headless Ollama](https://github.com/nischalj10/headless-ollama) (Scripts to automatically install ollama client & models on any OS for apps that depend on ollama server) +- [Terraform AWS Ollama & Open WebUI](https://github.com/xuyangbocn/terraform-aws-self-host-llm) (A Terraform module to deploy on AWS a ready-to-use Ollama service, together with its front-end Open WebUI service.) +- [node-red-contrib-ollama](https://github.com/jakubburkiewicz/node-red-contrib-ollama) +- [Local AI Helper](https://github.com/ivostoykov/localAI) (Chrome and Firefox extensions that enable interactions with the active tab and customisable API endpoints. Includes secure storage for user prompts.) 
+- [vnc-lm](https://github.com/jake83741/vnc-lm) (Discord bot for messaging with LLMs through Ollama and LiteLLM. Seamlessly move between local and flagship models.)
+- [LSP-AI](https://github.com/SilasMarvin/lsp-ai) (Open-source language server for AI-powered functionality)
+- [QodeAssist](https://github.com/Palm1r/QodeAssist) (AI-powered coding assistant plugin for Qt Creator)
+- [Obsidian Quiz Generator plugin](https://github.com/ECuiDev/obsidian-quiz-generator)
+- [AI Summary Helper plugin](https://github.com/philffm/ai-summary-helper)
+- [TextCraft](https://github.com/suncloudsmoon/TextCraft) (Copilot in Word alternative using Ollama)
+- [Alfred Ollama](https://github.com/zeitlings/alfred-ollama) (Alfred Workflow)
+- [TextLLaMA](https://github.com/adarshM84/TextLLaMA) A Chrome Extension that helps you write emails, correct grammar, and translate into any language
+- [Simple-Discord-AI](https://github.com/zyphixor/simple-discord-ai)
+- [LLM Telegram Bot](https://github.com/innightwolfsleep/llm_telegram_bot) (Telegram bot, primarily for RP. Oobabooga-like buttons, [A1111](https://github.com/AUTOMATIC1111/stable-diffusion-webui) API integration, etc.)
+- [mcp-llm](https://github.com/sammcj/mcp-llm) (MCP Server to allow LLMs to call other LLMs)
+- [UnityCodeLama](https://github.com/HardCodeDev777/UnityCodeLama) (Unity Editor tool to analyze scripts via Ollama)
+
+### Supported backends
+
+- [llama.cpp](https://github.com/ggerganov/llama.cpp) project founded by Georgi Gerganov.
+
+### Observability
+
+- [Opik](https://www.comet.com/docs/opik/cookbook/ollama) is an open-source platform to debug, evaluate, and monitor your LLM applications, RAG systems, and agentic workflows with comprehensive tracing, automated evaluations, and production-ready dashboards. Opik supports native integration with Ollama.
+- [Lunary](https://lunary.ai/docs/integrations/ollama) is the leading open-source LLM observability platform. It provides a variety of enterprise-grade features such as real-time analytics, prompt templates management, PII masking, and comprehensive agent tracing.
+- [OpenLIT](https://github.com/openlit/openlit) is an OpenTelemetry-native tool for monitoring Ollama Applications & GPUs using traces and metrics.
+- [HoneyHive](https://docs.honeyhive.ai/integrations/ollama) is an AI observability and evaluation platform for AI agents. Use HoneyHive to evaluate agent performance, interrogate failures, and monitor quality in production.
+- [Langfuse](https://langfuse.com/docs/integrations/ollama) is an open source LLM observability platform that enables teams to collaboratively monitor, evaluate and debug AI applications.
+- [MLflow Tracing](https://mlflow.org/docs/latest/llms/tracing/index.html#automatic-tracing) is an open source LLM observability tool with a convenient API to log and visualize traces, making it easy to debug and evaluate GenAI applications.

diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000..d38bb7c --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,25 @@
+# Security
+
+The Ollama maintainer team takes security seriously and will actively work to resolve security issues.
+
+## Reporting a vulnerability
+
+If you discover a security vulnerability, please do not open a public issue. Instead, please report it by emailing hello@ollama.com. We ask that you give us sufficient time to investigate and address the vulnerability before disclosing it publicly.
+
+Please include the following details in your report:
+- A description of the vulnerability
+- Steps to reproduce the issue
+- Your assessment of the potential impact
+- Any possible mitigations
+
+## Security best practices
+
+While the maintainer team does their best to secure Ollama, users are encouraged to implement their own security best practices, such as:
+
+- Regularly updating to the latest version of Ollama
+- Securing access to hosted instances of Ollama
+- Monitoring systems for unusual activity
+
+## Contact
+
+For any other questions or concerns related to security, please contact us at hello@ollama.com

diff --git a/api/client.go b/api/client.go new file mode 100644 index 0000000..3dffce6 --- /dev/null +++ b/api/client.go @@ -0,0 +1,380 @@
+// Package api implements the client-side API for code wishing to interact
+// with the ollama service. The methods of the [Client] type correspond to
+// the ollama REST API as described in [the API documentation].
+// The ollama command-line client itself uses this package to interact with
+// the backend service.
+//
+// # Examples
+//
+// Several examples of using this package are available [in the GitHub
+// repository].
+//
+// [the API documentation]: https://github.com/ollama/ollama/blob/main/docs/api.md
+// [in the GitHub repository]: https://github.com/ollama/ollama/tree/main/api/examples
+package api
+
+import (
+ "bufio"
+ "bytes"
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "net/http"
+ "net/url"
+ "runtime"
+
+ "github.com/ollama/ollama/envconfig"
+ "github.com/ollama/ollama/format"
+ "github.com/ollama/ollama/version"
+)
+
+// Client encapsulates client state for interacting with the ollama
+// service. Use [ClientFromEnvironment] to create new Clients.
+type Client struct {
+ base *url.URL
+ http *http.Client
+}
+
+func checkError(resp *http.Response, body []byte) error {
+ if resp.StatusCode < http.StatusBadRequest {
+ return nil
+ }
+
+ apiError := StatusError{StatusCode: resp.StatusCode}
+
+ err := json.Unmarshal(body, &apiError)
+ if err != nil {
+ // Use the full body as the message if we fail to decode a response.
+ apiError.ErrorMessage = string(body)
+ }
+
+ return apiError
+}
+
+// ClientFromEnvironment creates a new [Client] using configuration from the
+// environment variable OLLAMA_HOST, which points to the network host and
+// port on which the ollama service is listening. The format of this variable
+// is:
+//
+// <scheme>://<host>:<port>
+//
+// If the variable is not specified, a default ollama host and port will be
+// used.
+func ClientFromEnvironment() (*Client, error) { + return &Client{ + base: envconfig.Host(), + http: http.DefaultClient, + }, nil +} + +func NewClient(base *url.URL, http *http.Client) *Client { + return &Client{ + base: base, + http: http, + } +} + +func (c *Client) do(ctx context.Context, method, path string, reqData, respData any) error { + var reqBody io.Reader + var data []byte + var err error + + switch reqData := reqData.(type) { + case io.Reader: + // reqData is already an io.Reader + reqBody = reqData + case nil: + // noop + default: + data, err = json.Marshal(reqData) + if err != nil { + return err + } + + reqBody = bytes.NewReader(data) + } + + requestURL := c.base.JoinPath(path) + request, err := http.NewRequestWithContext(ctx, method, requestURL.String(), reqBody) + if err != nil { + return err + } + + request.Header.Set("Content-Type", "application/json") + request.Header.Set("Accept", "application/json") + request.Header.Set("User-Agent", fmt.Sprintf("ollama/%s (%s %s) Go/%s", version.Version, runtime.GOARCH, runtime.GOOS, runtime.Version())) + + respObj, err := c.http.Do(request) + if err != nil { + return err + } + defer respObj.Body.Close() + + respBody, err := io.ReadAll(respObj.Body) + if err != nil { + return err + } + + if err := checkError(respObj, respBody); err != nil { + return err + } + + if len(respBody) > 0 && respData != nil { + if err := json.Unmarshal(respBody, respData); err != nil { + return err + } + } + return nil +} + +const maxBufferSize = 512 * format.KiloByte + +func (c *Client) stream(ctx context.Context, method, path string, data any, fn func([]byte) error) error { + var buf io.Reader + if data != nil { + bts, err := json.Marshal(data) + if err != nil { + return err + } + + buf = bytes.NewBuffer(bts) + } + + requestURL := c.base.JoinPath(path) + request, err := http.NewRequestWithContext(ctx, method, requestURL.String(), buf) + if err != nil { + return err + } + + request.Header.Set("Content-Type", "application/json") + request.Header.Set("Accept", "application/x-ndjson") + request.Header.Set("User-Agent", fmt.Sprintf("ollama/%s (%s %s) Go/%s", version.Version, runtime.GOARCH, runtime.GOOS, runtime.Version())) + + response, err := c.http.Do(request) + if err != nil { + return err + } + defer response.Body.Close() + + scanner := bufio.NewScanner(response.Body) + // increase the buffer size to avoid running out of space + scanBuf := make([]byte, 0, maxBufferSize) + scanner.Buffer(scanBuf, maxBufferSize) + for scanner.Scan() { + var errorResponse struct { + Error string `json:"error,omitempty"` + } + + bts := scanner.Bytes() + if err := json.Unmarshal(bts, &errorResponse); err != nil { + return fmt.Errorf("unmarshal: %w", err) + } + + if errorResponse.Error != "" { + return errors.New(errorResponse.Error) + } + + if response.StatusCode >= http.StatusBadRequest { + return StatusError{ + StatusCode: response.StatusCode, + Status: response.Status, + ErrorMessage: errorResponse.Error, + } + } + + if err := fn(bts); err != nil { + return err + } + } + + return nil +} + +// GenerateResponseFunc is a function that [Client.Generate] invokes every time +// a response is received from the service. If this function returns an error, +// [Client.Generate] will stop generating and return this error. +type GenerateResponseFunc func(GenerateResponse) error + +// Generate generates a response for a given prompt. The req parameter should +// be populated with prompt details. fn is called for each response (there may +// be multiple responses, e.g. 
in case streaming is enabled). +func (c *Client) Generate(ctx context.Context, req *GenerateRequest, fn GenerateResponseFunc) error { + return c.stream(ctx, http.MethodPost, "/api/generate", req, func(bts []byte) error { + var resp GenerateResponse + if err := json.Unmarshal(bts, &resp); err != nil { + return err + } + + return fn(resp) + }) +} + +// ChatResponseFunc is a function that [Client.Chat] invokes every time +// a response is received from the service. If this function returns an error, +// [Client.Chat] will stop generating and return this error. +type ChatResponseFunc func(ChatResponse) error + +// Chat generates the next message in a chat. [ChatRequest] may contain a +// sequence of messages which can be used to maintain chat history with a model. +// fn is called for each response (there may be multiple responses, e.g. if case +// streaming is enabled). +func (c *Client) Chat(ctx context.Context, req *ChatRequest, fn ChatResponseFunc) error { + return c.stream(ctx, http.MethodPost, "/api/chat", req, func(bts []byte) error { + var resp ChatResponse + if err := json.Unmarshal(bts, &resp); err != nil { + return err + } + + return fn(resp) + }) +} + +// PullProgressFunc is a function that [Client.Pull] invokes every time there +// is progress with a "pull" request sent to the service. If this function +// returns an error, [Client.Pull] will stop the process and return this error. +type PullProgressFunc func(ProgressResponse) error + +// Pull downloads a model from the ollama library. fn is called each time +// progress is made on the request and can be used to display a progress bar, +// etc. +func (c *Client) Pull(ctx context.Context, req *PullRequest, fn PullProgressFunc) error { + return c.stream(ctx, http.MethodPost, "/api/pull", req, func(bts []byte) error { + var resp ProgressResponse + if err := json.Unmarshal(bts, &resp); err != nil { + return err + } + + return fn(resp) + }) +} + +// PushProgressFunc is a function that [Client.Push] invokes when progress is +// made. +// It's similar to other progress function types like [PullProgressFunc]. +type PushProgressFunc func(ProgressResponse) error + +// Push uploads a model to the model library; requires registering for ollama.ai +// and adding a public key first. fn is called each time progress is made on +// the request and can be used to display a progress bar, etc. +func (c *Client) Push(ctx context.Context, req *PushRequest, fn PushProgressFunc) error { + return c.stream(ctx, http.MethodPost, "/api/push", req, func(bts []byte) error { + var resp ProgressResponse + if err := json.Unmarshal(bts, &resp); err != nil { + return err + } + + return fn(resp) + }) +} + +// CreateProgressFunc is a function that [Client.Create] invokes when progress +// is made. +// It's similar to other progress function types like [PullProgressFunc]. +type CreateProgressFunc func(ProgressResponse) error + +// Create creates a model from a [Modelfile]. fn is a progress function that +// behaves similarly to other methods (see [Client.Pull]). +// +// [Modelfile]: https://github.com/ollama/ollama/blob/main/docs/modelfile.md +func (c *Client) Create(ctx context.Context, req *CreateRequest, fn CreateProgressFunc) error { + return c.stream(ctx, http.MethodPost, "/api/create", req, func(bts []byte) error { + var resp ProgressResponse + if err := json.Unmarshal(bts, &resp); err != nil { + return err + } + + return fn(resp) + }) +} + +// List lists models that are available locally. 
+func (c *Client) List(ctx context.Context) (*ListResponse, error) { + var lr ListResponse + if err := c.do(ctx, http.MethodGet, "/api/tags", nil, &lr); err != nil { + return nil, err + } + return &lr, nil +} + +// ListRunning lists running models. +func (c *Client) ListRunning(ctx context.Context) (*ProcessResponse, error) { + var lr ProcessResponse + if err := c.do(ctx, http.MethodGet, "/api/ps", nil, &lr); err != nil { + return nil, err + } + return &lr, nil +} + +// Copy copies a model - creating a model with another name from an existing +// model. +func (c *Client) Copy(ctx context.Context, req *CopyRequest) error { + if err := c.do(ctx, http.MethodPost, "/api/copy", req, nil); err != nil { + return err + } + return nil +} + +// Delete deletes a model and its data. +func (c *Client) Delete(ctx context.Context, req *DeleteRequest) error { + if err := c.do(ctx, http.MethodDelete, "/api/delete", req, nil); err != nil { + return err + } + return nil +} + +// Show obtains model information, including details, modelfile, license etc. +func (c *Client) Show(ctx context.Context, req *ShowRequest) (*ShowResponse, error) { + var resp ShowResponse + if err := c.do(ctx, http.MethodPost, "/api/show", req, &resp); err != nil { + return nil, err + } + return &resp, nil +} + +// Heartbeat checks if the server has started and is responsive; if yes, it +// returns nil, otherwise an error. +func (c *Client) Heartbeat(ctx context.Context) error { + if err := c.do(ctx, http.MethodHead, "/", nil, nil); err != nil { + return err + } + return nil +} + +// Embed generates embeddings from a model. +func (c *Client) Embed(ctx context.Context, req *EmbedRequest) (*EmbedResponse, error) { + var resp EmbedResponse + if err := c.do(ctx, http.MethodPost, "/api/embed", req, &resp); err != nil { + return nil, err + } + return &resp, nil +} + +// Embeddings generates an embedding from a model. +func (c *Client) Embeddings(ctx context.Context, req *EmbeddingRequest) (*EmbeddingResponse, error) { + var resp EmbeddingResponse + if err := c.do(ctx, http.MethodPost, "/api/embeddings", req, &resp); err != nil { + return nil, err + } + return &resp, nil +} + +// CreateBlob creates a blob from a file on the server. digest is the +// expected SHA256 digest of the file, and r represents the file. +func (c *Client) CreateBlob(ctx context.Context, digest string, r io.Reader) error { + return c.do(ctx, http.MethodPost, fmt.Sprintf("/api/blobs/%s", digest), r, nil) +} + +// Version returns the Ollama server version as a string. 
+func (c *Client) Version(ctx context.Context) (string, error) { + var version struct { + Version string `json:"version"` + } + + if err := c.do(ctx, http.MethodGet, "/api/version", nil, &version); err != nil { + return "", err + } + + return version.Version, nil +} diff --git a/api/client_test.go b/api/client_test.go new file mode 100644 index 0000000..2ceeec9 --- /dev/null +++ b/api/client_test.go @@ -0,0 +1,254 @@ +package api + +import ( + "encoding/json" + "fmt" + "net/http" + "net/http/httptest" + "net/url" + "strings" + "testing" +) + +func TestClientFromEnvironment(t *testing.T) { + type testCase struct { + value string + expect string + err error + } + + testCases := map[string]*testCase{ + "empty": {value: "", expect: "http://127.0.0.1:11434"}, + "only address": {value: "1.2.3.4", expect: "http://1.2.3.4:11434"}, + "only port": {value: ":1234", expect: "http://:1234"}, + "address and port": {value: "1.2.3.4:1234", expect: "http://1.2.3.4:1234"}, + "scheme http and address": {value: "http://1.2.3.4", expect: "http://1.2.3.4:80"}, + "scheme https and address": {value: "https://1.2.3.4", expect: "https://1.2.3.4:443"}, + "scheme, address, and port": {value: "https://1.2.3.4:1234", expect: "https://1.2.3.4:1234"}, + "hostname": {value: "example.com", expect: "http://example.com:11434"}, + "hostname and port": {value: "example.com:1234", expect: "http://example.com:1234"}, + "scheme http and hostname": {value: "http://example.com", expect: "http://example.com:80"}, + "scheme https and hostname": {value: "https://example.com", expect: "https://example.com:443"}, + "scheme, hostname, and port": {value: "https://example.com:1234", expect: "https://example.com:1234"}, + "trailing slash": {value: "example.com/", expect: "http://example.com:11434"}, + "trailing slash port": {value: "example.com:1234/", expect: "http://example.com:1234"}, + } + + for k, v := range testCases { + t.Run(k, func(t *testing.T) { + t.Setenv("OLLAMA_HOST", v.value) + + client, err := ClientFromEnvironment() + if err != v.err { + t.Fatalf("expected %s, got %s", v.err, err) + } + + if client.base.String() != v.expect { + t.Fatalf("expected %s, got %s", v.expect, client.base.String()) + } + }) + } +} + +// testError represents an internal error type with status code and message +// this is used since the error response from the server is not a standard error struct +type testError struct { + message string + statusCode int +} + +func (e testError) Error() string { + return e.message +} + +func TestClientStream(t *testing.T) { + testCases := []struct { + name string + responses []any + wantErr string + }{ + { + name: "immediate error response", + responses: []any{ + testError{ + message: "test error message", + statusCode: http.StatusBadRequest, + }, + }, + wantErr: "test error message", + }, + { + name: "error after successful chunks, ok response", + responses: []any{ + ChatResponse{Message: Message{Content: "partial response 1"}}, + ChatResponse{Message: Message{Content: "partial response 2"}}, + testError{ + message: "mid-stream error", + statusCode: http.StatusOK, + }, + }, + wantErr: "mid-stream error", + }, + { + name: "successful stream completion", + responses: []any{ + ChatResponse{Message: Message{Content: "chunk 1"}}, + ChatResponse{Message: Message{Content: "chunk 2"}}, + ChatResponse{ + Message: Message{Content: "final chunk"}, + Done: true, + DoneReason: "stop", + }, + }, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + ts := httptest.NewServer(http.HandlerFunc(func(w 
http.ResponseWriter, r *http.Request) { + flusher, ok := w.(http.Flusher) + if !ok { + t.Fatal("expected http.Flusher") + } + + w.Header().Set("Content-Type", "application/x-ndjson") + + for _, resp := range tc.responses { + if errResp, ok := resp.(testError); ok { + w.WriteHeader(errResp.statusCode) + err := json.NewEncoder(w).Encode(map[string]string{ + "error": errResp.message, + }) + if err != nil { + t.Fatal("failed to encode error response:", err) + } + return + } + + if err := json.NewEncoder(w).Encode(resp); err != nil { + t.Fatalf("failed to encode response: %v", err) + } + flusher.Flush() + } + })) + defer ts.Close() + + client := NewClient(&url.URL{Scheme: "http", Host: ts.Listener.Addr().String()}, http.DefaultClient) + + var receivedChunks []ChatResponse + err := client.stream(t.Context(), http.MethodPost, "/v1/chat", nil, func(chunk []byte) error { + var resp ChatResponse + if err := json.Unmarshal(chunk, &resp); err != nil { + return fmt.Errorf("failed to unmarshal chunk: %w", err) + } + receivedChunks = append(receivedChunks, resp) + return nil + }) + + if tc.wantErr != "" { + if err == nil { + t.Fatal("expected error but got nil") + } + if !strings.Contains(err.Error(), tc.wantErr) { + t.Errorf("expected error containing %q, got %v", tc.wantErr, err) + } + return + } + if err != nil { + t.Errorf("unexpected error: %v", err) + } + }) + } +} + +func TestClientDo(t *testing.T) { + testCases := []struct { + name string + response any + wantErr string + }{ + { + name: "immediate error response", + response: testError{ + message: "test error message", + statusCode: http.StatusBadRequest, + }, + wantErr: "test error message", + }, + { + name: "server error response", + response: testError{ + message: "internal error", + statusCode: http.StatusInternalServerError, + }, + wantErr: "internal error", + }, + { + name: "successful response", + response: struct { + ID string `json:"id"` + Success bool `json:"success"` + }{ + ID: "msg_123", + Success: true, + }, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if errResp, ok := tc.response.(testError); ok { + w.WriteHeader(errResp.statusCode) + err := json.NewEncoder(w).Encode(map[string]string{ + "error": errResp.message, + }) + if err != nil { + t.Fatal("failed to encode error response:", err) + } + return + } + + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(tc.response); err != nil { + t.Fatalf("failed to encode response: %v", err) + } + })) + defer ts.Close() + + client := NewClient(&url.URL{Scheme: "http", Host: ts.Listener.Addr().String()}, http.DefaultClient) + + var resp struct { + ID string `json:"id"` + Success bool `json:"success"` + } + err := client.do(t.Context(), http.MethodPost, "/v1/messages", nil, &resp) + + if tc.wantErr != "" { + if err == nil { + t.Fatalf("got nil, want error %q", tc.wantErr) + } + if err.Error() != tc.wantErr { + t.Errorf("error message mismatch: got %q, want %q", err.Error(), tc.wantErr) + } + return + } + + if err != nil { + t.Fatalf("got error %q, want nil", err) + } + + if expectedResp, ok := tc.response.(struct { + ID string `json:"id"` + Success bool `json:"success"` + }); ok { + if resp.ID != expectedResp.ID { + t.Errorf("response ID mismatch: got %q, want %q", resp.ID, expectedResp.ID) + } + if resp.Success != expectedResp.Success { + t.Errorf("response Success mismatch: got %v, want %v", resp.Success, expectedResp.Success) 
+ } + } + }) + } +} diff --git a/api/examples/README.md b/api/examples/README.md new file mode 100644 index 0000000..e83b536 --- /dev/null +++ b/api/examples/README.md @@ -0,0 +1,18 @@ +# Ollama API Examples + +Run the examples in this directory with: + +```shell +go run example_name/main.go +``` + +## Chat - Chat with a model +- [chat/main.go](chat/main.go) + +## Generate - Generate text from a model +- [generate/main.go](generate/main.go) +- [generate-streaming/main.go](generate-streaming/main.go) + +## Pull - Pull a model +- [pull-progress/main.go](pull-progress/main.go) + diff --git a/api/examples/chat/main.go b/api/examples/chat/main.go new file mode 100644 index 0000000..0743030 --- /dev/null +++ b/api/examples/chat/main.go @@ -0,0 +1,51 @@ +package main + +import ( + "context" + "fmt" + "log" + + "github.com/ollama/ollama/api" +) + +func main() { + client, err := api.ClientFromEnvironment() + if err != nil { + log.Fatal(err) + } + + messages := []api.Message{ + api.Message{ + Role: "system", + Content: "Provide very brief, concise responses", + }, + api.Message{ + Role: "user", + Content: "Name some unusual animals", + }, + api.Message{ + Role: "assistant", + Content: "Monotreme, platypus, echidna", + }, + api.Message{ + Role: "user", + Content: "which of these is the most dangerous?", + }, + } + + ctx := context.Background() + req := &api.ChatRequest{ + Model: "llama3.2", + Messages: messages, + } + + respFunc := func(resp api.ChatResponse) error { + fmt.Print(resp.Message.Content) + return nil + } + + err = client.Chat(ctx, req, respFunc) + if err != nil { + log.Fatal(err) + } +} diff --git a/api/examples/generate-streaming/main.go b/api/examples/generate-streaming/main.go new file mode 100644 index 0000000..3acfb22 --- /dev/null +++ b/api/examples/generate-streaming/main.go @@ -0,0 +1,40 @@ +package main + +import ( + "context" + "fmt" + "log" + + "github.com/ollama/ollama/api" +) + +func main() { + client, err := api.ClientFromEnvironment() + if err != nil { + log.Fatal(err) + } + + // By default, GenerateRequest is streaming. + req := &api.GenerateRequest{ + Model: "gemma2", + Prompt: "how many planets are there?", + } + + ctx := context.Background() + respFunc := func(resp api.GenerateResponse) error { + // Only print the response here; GenerateResponse has a number of other + // interesting fields you want to examine. + + // In streaming mode, responses are partial so we call fmt.Print (and not + // Println) in order to avoid spurious newlines being introduced. The + // model will insert its own newlines if it wants. + fmt.Print(resp.Response) + return nil + } + + err = client.Generate(ctx, req, respFunc) + if err != nil { + log.Fatal(err) + } + fmt.Println() +} diff --git a/api/examples/generate/main.go b/api/examples/generate/main.go new file mode 100644 index 0000000..2fe2874 --- /dev/null +++ b/api/examples/generate/main.go @@ -0,0 +1,37 @@ +package main + +import ( + "context" + "fmt" + "log" + + "github.com/ollama/ollama/api" +) + +func main() { + client, err := api.ClientFromEnvironment() + if err != nil { + log.Fatal(err) + } + + req := &api.GenerateRequest{ + Model: "gemma2", + Prompt: "how many planets are there?", + + // set streaming to false + Stream: new(bool), + } + + ctx := context.Background() + respFunc := func(resp api.GenerateResponse) error { + // Only print the response here; GenerateResponse has a number of other + // interesting fields you want to examine. 
+ fmt.Println(resp.Response) + return nil + } + + err = client.Generate(ctx, req, respFunc) + if err != nil { + log.Fatal(err) + } +} diff --git a/api/examples/multimodal/main.go b/api/examples/multimodal/main.go new file mode 100644 index 0000000..0b0f19e --- /dev/null +++ b/api/examples/multimodal/main.go @@ -0,0 +1,47 @@ +package main + +import ( + "context" + "fmt" + "log" + "os" + + "github.com/ollama/ollama/api" +) + +func main() { + if len(os.Args) <= 1 { + log.Fatal("usage: ") + } + + imgData, err := os.ReadFile(os.Args[1]) + if err != nil { + log.Fatal(err) + } + + client, err := api.ClientFromEnvironment() + if err != nil { + log.Fatal(err) + } + + req := &api.GenerateRequest{ + Model: "llava", + Prompt: "describe this image", + Images: []api.ImageData{imgData}, + } + + ctx := context.Background() + respFunc := func(resp api.GenerateResponse) error { + // In streaming mode, responses are partial so we call fmt.Print (and not + // Println) in order to avoid spurious newlines being introduced. The + // model will insert its own newlines if it wants. + fmt.Print(resp.Response) + return nil + } + + err = client.Generate(ctx, req, respFunc) + if err != nil { + log.Fatal(err) + } + fmt.Println() +} diff --git a/api/examples/pull-progress/main.go b/api/examples/pull-progress/main.go new file mode 100644 index 0000000..7486336 --- /dev/null +++ b/api/examples/pull-progress/main.go @@ -0,0 +1,31 @@ +package main + +import ( + "context" + "fmt" + "log" + + "github.com/ollama/ollama/api" +) + +func main() { + client, err := api.ClientFromEnvironment() + if err != nil { + log.Fatal(err) + } + + ctx := context.Background() + + req := &api.PullRequest{ + Model: "mistral", + } + progressFunc := func(resp api.ProgressResponse) error { + fmt.Printf("Progress: status=%v, total=%v, completed=%v\n", resp.Status, resp.Total, resp.Completed) + return nil + } + + err = client.Pull(ctx, req, progressFunc) + if err != nil { + log.Fatal(err) + } +} diff --git a/api/types.go b/api/types.go new file mode 100644 index 0000000..602f93d --- /dev/null +++ b/api/types.go @@ -0,0 +1,771 @@ +package api + +import ( + "encoding/json" + "fmt" + "log/slog" + "math" + "os" + "reflect" + "strconv" + "strings" + "time" + + "github.com/ollama/ollama/envconfig" + "github.com/ollama/ollama/types/model" +) + +// StatusError is an error with an HTTP status code and message. +type StatusError struct { + StatusCode int + Status string + ErrorMessage string `json:"error"` +} + +func (e StatusError) Error() string { + switch { + case e.Status != "" && e.ErrorMessage != "": + return fmt.Sprintf("%s: %s", e.Status, e.ErrorMessage) + case e.Status != "": + return e.Status + case e.ErrorMessage != "": + return e.ErrorMessage + default: + // this should not happen + return "something went wrong, please see the ollama server logs for details" + } +} + +// ImageData represents the raw binary data of an image file. +type ImageData []byte + +// GenerateRequest describes a request sent by [Client.Generate]. While you +// have to specify the Model and Prompt fields, all the other fields have +// reasonable defaults for basic uses. +type GenerateRequest struct { + // Model is the model name; it should be a name familiar to Ollama from + // the library at https://ollama.com/library + Model string `json:"model"` + + // Prompt is the textual prompt to send to the model. + Prompt string `json:"prompt"` + + // Suffix is the text that comes after the inserted text. 
+ Suffix string `json:"suffix"` + + // System overrides the model's default system message/prompt. + System string `json:"system"` + + // Template overrides the model's default prompt template. + Template string `json:"template"` + + // Context is the context parameter returned from a previous call to + // [Client.Generate]. It can be used to keep a short conversational memory. + Context []int `json:"context,omitempty"` + + // Stream specifies whether the response is streaming; it is true by default. + Stream *bool `json:"stream,omitempty"` + + // Raw set to true means that no formatting will be applied to the prompt. + Raw bool `json:"raw,omitempty"` + + // Format specifies the format to return a response in. + Format json.RawMessage `json:"format,omitempty"` + + // KeepAlive controls how long the model will stay loaded in memory following + // this request. + KeepAlive *Duration `json:"keep_alive,omitempty"` + + // Images is an optional list of raw image bytes accompanying this + // request, for multimodal models. + Images []ImageData `json:"images,omitempty"` + + // Options lists model-specific options. For example, temperature can be + // set through this field, if the model supports it. + Options map[string]any `json:"options"` +} + +// ChatRequest describes a request sent by [Client.Chat]. +type ChatRequest struct { + // Model is the model name, as in [GenerateRequest]. + Model string `json:"model"` + + // Messages is the messages of the chat - can be used to keep a chat memory. + Messages []Message `json:"messages"` + + // Stream enables streaming of returned responses; true by default. + Stream *bool `json:"stream,omitempty"` + + // Format is the format to return the response in (e.g. "json"). + Format json.RawMessage `json:"format,omitempty"` + + // KeepAlive controls how long the model will stay loaded into memory + // following the request. + KeepAlive *Duration `json:"keep_alive,omitempty"` + + // Tools is an optional list of tools the model has access to. + Tools `json:"tools,omitempty"` + + // Options lists model-specific options. + Options map[string]any `json:"options"` +} + +type Tools []Tool + +func (t Tools) String() string { + bts, _ := json.Marshal(t) + return string(bts) +} + +func (t Tool) String() string { + bts, _ := json.Marshal(t) + return string(bts) +} + +// Message is a single message in a chat sequence. The message contains the +// role ("system", "user", or "assistant"), the content and an optional list +// of images. 
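+// Role values are normalized to lower case when a Message is unmarshalled from JSON.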
+type Message struct { + Role string `json:"role"` + Content string `json:"content"` + Images []ImageData `json:"images,omitempty"` + ToolCalls []ToolCall `json:"tool_calls,omitempty"` +} + +func (m *Message) UnmarshalJSON(b []byte) error { + type Alias Message + var a Alias + if err := json.Unmarshal(b, &a); err != nil { + return err + } + + *m = Message(a) + m.Role = strings.ToLower(m.Role) + return nil +} + +type ToolCall struct { + Function ToolCallFunction `json:"function"` +} + +type ToolCallFunction struct { + Index int `json:"index,omitempty"` + Name string `json:"name"` + Arguments ToolCallFunctionArguments `json:"arguments"` +} + +type ToolCallFunctionArguments map[string]any + +func (t *ToolCallFunctionArguments) String() string { + bts, _ := json.Marshal(t) + return string(bts) +} + +type Tool struct { + Type string `json:"type"` + Items any `json:"items,omitempty"` + Function ToolFunction `json:"function"` +} + +// PropertyType can be either a string or an array of strings +type PropertyType []string + +// UnmarshalJSON implements the json.Unmarshaler interface +func (pt *PropertyType) UnmarshalJSON(data []byte) error { + // Try to unmarshal as a string first + var s string + if err := json.Unmarshal(data, &s); err == nil { + *pt = []string{s} + return nil + } + + // If that fails, try to unmarshal as an array of strings + var a []string + if err := json.Unmarshal(data, &a); err != nil { + return err + } + *pt = a + return nil +} + +// MarshalJSON implements the json.Marshaler interface +func (pt PropertyType) MarshalJSON() ([]byte, error) { + if len(pt) == 1 { + // If there's only one type, marshal as a string + return json.Marshal(pt[0]) + } + // Otherwise marshal as an array + return json.Marshal([]string(pt)) +} + +// String returns a string representation of the PropertyType +func (pt PropertyType) String() string { + if len(pt) == 0 { + return "" + } + if len(pt) == 1 { + return pt[0] + } + return fmt.Sprintf("%v", []string(pt)) +} + +type ToolFunction struct { + Name string `json:"name"` + Description string `json:"description"` + Parameters struct { + Type string `json:"type"` + Defs any `json:"$defs,omitempty"` + Items any `json:"items,omitempty"` + Required []string `json:"required"` + Properties map[string]struct { + Type PropertyType `json:"type"` + Items any `json:"items,omitempty"` + Description string `json:"description"` + Enum []any `json:"enum,omitempty"` + } `json:"properties"` + } `json:"parameters"` +} + +func (t *ToolFunction) String() string { + bts, _ := json.Marshal(t) + return string(bts) +} + +// ChatResponse is the response returned by [Client.Chat]. Its fields are +// similar to [GenerateResponse]. +type ChatResponse struct { + Model string `json:"model"` + CreatedAt time.Time `json:"created_at"` + Message Message `json:"message"` + DoneReason string `json:"done_reason,omitempty"` + + Done bool `json:"done"` + + Metrics +} + +type Metrics struct { + TotalDuration time.Duration `json:"total_duration,omitempty"` + LoadDuration time.Duration `json:"load_duration,omitempty"` + PromptEvalCount int `json:"prompt_eval_count,omitempty"` + PromptEvalDuration time.Duration `json:"prompt_eval_duration,omitempty"` + EvalCount int `json:"eval_count,omitempty"` + EvalDuration time.Duration `json:"eval_duration,omitempty"` +} + +// Options specified in [GenerateRequest]. If you add a new option here, also +// add it to the API docs. 
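+// Unrecognized option keys supplied by clients are logged and ignored rather than rejected (see FromMap).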
+type Options struct { + Runner + + // Predict options used at runtime + NumKeep int `json:"num_keep,omitempty"` + Seed int `json:"seed,omitempty"` + NumPredict int `json:"num_predict,omitempty"` + TopK int `json:"top_k,omitempty"` + TopP float32 `json:"top_p,omitempty"` + MinP float32 `json:"min_p,omitempty"` + TypicalP float32 `json:"typical_p,omitempty"` + RepeatLastN int `json:"repeat_last_n,omitempty"` + Temperature float32 `json:"temperature,omitempty"` + RepeatPenalty float32 `json:"repeat_penalty,omitempty"` + PresencePenalty float32 `json:"presence_penalty,omitempty"` + FrequencyPenalty float32 `json:"frequency_penalty,omitempty"` + Stop []string `json:"stop,omitempty"` +} + +// Runner options which must be set when the model is loaded into memory +type Runner struct { + NumCtx int `json:"num_ctx,omitempty"` + NumBatch int `json:"num_batch,omitempty"` + NumGPU int `json:"num_gpu,omitempty"` + MainGPU int `json:"main_gpu,omitempty"` + UseMMap *bool `json:"use_mmap,omitempty"` + NumThread int `json:"num_thread,omitempty"` +} + +// EmbedRequest is the request passed to [Client.Embed]. +type EmbedRequest struct { + // Model is the model name. + Model string `json:"model"` + + // Input is the input to embed. + Input any `json:"input"` + + // KeepAlive controls how long the model will stay loaded in memory following + // this request. + KeepAlive *Duration `json:"keep_alive,omitempty"` + + Truncate *bool `json:"truncate,omitempty"` + + // Options lists model-specific options. + Options map[string]any `json:"options"` +} + +// EmbedResponse is the response from [Client.Embed]. +type EmbedResponse struct { + Model string `json:"model"` + Embeddings [][]float32 `json:"embeddings"` + + TotalDuration time.Duration `json:"total_duration,omitempty"` + LoadDuration time.Duration `json:"load_duration,omitempty"` + PromptEvalCount int `json:"prompt_eval_count,omitempty"` +} + +// EmbeddingRequest is the request passed to [Client.Embeddings]. +type EmbeddingRequest struct { + // Model is the model name. + Model string `json:"model"` + + // Prompt is the textual prompt to embed. + Prompt string `json:"prompt"` + + // KeepAlive controls how long the model will stay loaded in memory following + // this request. + KeepAlive *Duration `json:"keep_alive,omitempty"` + + // Options lists model-specific options. + Options map[string]any `json:"options"` +} + +// EmbeddingResponse is the response from [Client.Embeddings]. +type EmbeddingResponse struct { + Embedding []float64 `json:"embedding"` +} + +// CreateRequest is the request passed to [Client.Create]. +type CreateRequest struct { + Model string `json:"model"` + Stream *bool `json:"stream,omitempty"` + Quantize string `json:"quantize,omitempty"` + + From string `json:"from,omitempty"` + Files map[string]string `json:"files,omitempty"` + Adapters map[string]string `json:"adapters,omitempty"` + Template string `json:"template,omitempty"` + License any `json:"license,omitempty"` + System string `json:"system,omitempty"` + Parameters map[string]any `json:"parameters,omitempty"` + Messages []Message `json:"messages,omitempty"` + + // Deprecated: set the model name with Model instead + Name string `json:"name"` + // Deprecated: use Quantize instead + Quantization string `json:"quantization,omitempty"` +} + +// DeleteRequest is the request passed to [Client.Delete]. 
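+// Only Model is required; the deprecated Name field is kept for backwards compatibility.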
+type DeleteRequest struct { + Model string `json:"model"` + + // Deprecated: set the model name with Model instead + Name string `json:"name"` +} + +// ShowRequest is the request passed to [Client.Show]. +type ShowRequest struct { + Model string `json:"model"` + System string `json:"system"` + + // Template is deprecated + Template string `json:"template"` + Verbose bool `json:"verbose"` + + Options map[string]any `json:"options"` + + // Deprecated: set the model name with Model instead + Name string `json:"name"` +} + +// ShowResponse is the response returned from [Client.Show]. +type ShowResponse struct { + License string `json:"license,omitempty"` + Modelfile string `json:"modelfile,omitempty"` + Parameters string `json:"parameters,omitempty"` + Template string `json:"template,omitempty"` + System string `json:"system,omitempty"` + Details ModelDetails `json:"details,omitempty"` + Messages []Message `json:"messages,omitempty"` + ModelInfo map[string]any `json:"model_info,omitempty"` + ProjectorInfo map[string]any `json:"projector_info,omitempty"` + Tensors []Tensor `json:"tensors,omitempty"` + Capabilities []model.Capability `json:"capabilities,omitempty"` + ModifiedAt time.Time `json:"modified_at,omitempty"` +} + +// CopyRequest is the request passed to [Client.Copy]. +type CopyRequest struct { + Source string `json:"source"` + Destination string `json:"destination"` +} + +// PullRequest is the request passed to [Client.Pull]. +type PullRequest struct { + Model string `json:"model"` + Insecure bool `json:"insecure,omitempty"` // Deprecated: ignored + Username string `json:"username"` // Deprecated: ignored + Password string `json:"password"` // Deprecated: ignored + Stream *bool `json:"stream,omitempty"` + + // Deprecated: set the model name with Model instead + Name string `json:"name"` +} + +// ProgressResponse is the response passed to progress functions like +// [PullProgressFunc] and [PushProgressFunc]. +type ProgressResponse struct { + Status string `json:"status"` + Digest string `json:"digest,omitempty"` + Total int64 `json:"total,omitempty"` + Completed int64 `json:"completed,omitempty"` +} + +// PushRequest is the request passed to [Client.Push]. +type PushRequest struct { + Model string `json:"model"` + Insecure bool `json:"insecure,omitempty"` + Username string `json:"username"` + Password string `json:"password"` + Stream *bool `json:"stream,omitempty"` + + // Deprecated: set the model name with Model instead + Name string `json:"name"` +} + +// ListResponse is the response from [Client.List]. +type ListResponse struct { + Models []ListModelResponse `json:"models"` +} + +// ProcessResponse is the response from [Client.Process]. +type ProcessResponse struct { + Models []ProcessModelResponse `json:"models"` +} + +// ListModelResponse is a single model description in [ListResponse]. +type ListModelResponse struct { + Name string `json:"name"` + Model string `json:"model"` + ModifiedAt time.Time `json:"modified_at"` + Size int64 `json:"size"` + Digest string `json:"digest"` + Details ModelDetails `json:"details,omitempty"` +} + +// ProcessModelResponse is a single model description in [ProcessResponse]. 
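+// It reports a model that is currently loaded into memory, including when it will be unloaded (ExpiresAt) and how much VRAM it occupies (SizeVRAM).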
+type ProcessModelResponse struct { + Name string `json:"name"` + Model string `json:"model"` + Size int64 `json:"size"` + Digest string `json:"digest"` + Details ModelDetails `json:"details,omitempty"` + ExpiresAt time.Time `json:"expires_at"` + SizeVRAM int64 `json:"size_vram"` +} + +type TokenResponse struct { + Token string `json:"token"` +} + +// GenerateResponse is the response passed into [GenerateResponseFunc]. +type GenerateResponse struct { + // Model is the model name that generated the response. + Model string `json:"model"` + + // CreatedAt is the timestamp of the response. + CreatedAt time.Time `json:"created_at"` + + // Response is the textual response itself. + Response string `json:"response"` + + // Done specifies if the response is complete. + Done bool `json:"done"` + + // DoneReason is the reason the model stopped generating text. + DoneReason string `json:"done_reason,omitempty"` + + // Context is an encoding of the conversation used in this response; this + // can be sent in the next request to keep a conversational memory. + Context []int `json:"context,omitempty"` + + Metrics +} + +// ModelDetails provides details about a model. +type ModelDetails struct { + ParentModel string `json:"parent_model"` + Format string `json:"format"` + Family string `json:"family"` + Families []string `json:"families"` + ParameterSize string `json:"parameter_size"` + QuantizationLevel string `json:"quantization_level"` +} + +// Tensor describes the metadata for a given tensor. +type Tensor struct { + Name string `json:"name"` + Type string `json:"type"` + Shape []uint64 `json:"shape"` +} + +func (m *Metrics) Summary() { + if m.TotalDuration > 0 { + fmt.Fprintf(os.Stderr, "total duration: %v\n", m.TotalDuration) + } + + if m.LoadDuration > 0 { + fmt.Fprintf(os.Stderr, "load duration: %v\n", m.LoadDuration) + } + + if m.PromptEvalCount > 0 { + fmt.Fprintf(os.Stderr, "prompt eval count: %d token(s)\n", m.PromptEvalCount) + } + + if m.PromptEvalDuration > 0 { + fmt.Fprintf(os.Stderr, "prompt eval duration: %s\n", m.PromptEvalDuration) + fmt.Fprintf(os.Stderr, "prompt eval rate: %.2f tokens/s\n", float64(m.PromptEvalCount)/m.PromptEvalDuration.Seconds()) + } + + if m.EvalCount > 0 { + fmt.Fprintf(os.Stderr, "eval count: %d token(s)\n", m.EvalCount) + } + + if m.EvalDuration > 0 { + fmt.Fprintf(os.Stderr, "eval duration: %s\n", m.EvalDuration) + fmt.Fprintf(os.Stderr, "eval rate: %.2f tokens/s\n", float64(m.EvalCount)/m.EvalDuration.Seconds()) + } +} + +func (opts *Options) FromMap(m map[string]any) error { + valueOpts := reflect.ValueOf(opts).Elem() // names of the fields in the options struct + typeOpts := reflect.TypeOf(opts).Elem() // types of the fields in the options struct + + // build map of json struct tags to their types + jsonOpts := make(map[string]reflect.StructField) + for _, field := range reflect.VisibleFields(typeOpts) { + jsonTag := strings.Split(field.Tag.Get("json"), ",")[0] + if jsonTag != "" { + jsonOpts[jsonTag] = field + } + } + + for key, val := range m { + opt, ok := jsonOpts[key] + if !ok { + slog.Warn("invalid option provided", "option", key) + continue + } + + field := valueOpts.FieldByName(opt.Name) + if field.IsValid() && field.CanSet() { + if val == nil { + continue + } + + switch field.Kind() { + case reflect.Int: + switch t := val.(type) { + case int64: + field.SetInt(t) + case float64: + // when JSON unmarshals numbers, it uses float64, not int + field.SetInt(int64(t)) + default: + return fmt.Errorf("option %q must be of type integer", key) + } + case 
reflect.Bool: + val, ok := val.(bool) + if !ok { + return fmt.Errorf("option %q must be of type boolean", key) + } + field.SetBool(val) + case reflect.Float32: + // JSON unmarshals to float64 + val, ok := val.(float64) + if !ok { + return fmt.Errorf("option %q must be of type float32", key) + } + field.SetFloat(val) + case reflect.String: + val, ok := val.(string) + if !ok { + return fmt.Errorf("option %q must be of type string", key) + } + field.SetString(val) + case reflect.Slice: + // JSON unmarshals to []any, not []string + val, ok := val.([]any) + if !ok { + return fmt.Errorf("option %q must be of type array", key) + } + // convert []any to []string + slice := make([]string, len(val)) + for i, item := range val { + str, ok := item.(string) + if !ok { + return fmt.Errorf("option %q must be of an array of strings", key) + } + slice[i] = str + } + field.Set(reflect.ValueOf(slice)) + case reflect.Pointer: + var b bool + if field.Type() == reflect.TypeOf(&b) { + val, ok := val.(bool) + if !ok { + return fmt.Errorf("option %q must be of type boolean", key) + } + field.Set(reflect.ValueOf(&val)) + } else { + return fmt.Errorf("unknown type loading config params: %v %v", field.Kind(), field.Type()) + } + default: + return fmt.Errorf("unknown type loading config params: %v", field.Kind()) + } + } + } + + return nil +} + +// DefaultOptions is the default set of options for [GenerateRequest]; these +// values are used unless the user specifies other values explicitly. +func DefaultOptions() Options { + return Options{ + // options set on request to runner + NumPredict: -1, + + // set a minimal num_keep to avoid issues on context shifts + NumKeep: 4, + Temperature: 0.8, + TopK: 40, + TopP: 0.9, + TypicalP: 1.0, + RepeatLastN: 64, + RepeatPenalty: 1.1, + PresencePenalty: 0.0, + FrequencyPenalty: 0.0, + Seed: -1, + + Runner: Runner{ + // options set when the model is loaded + NumCtx: int(envconfig.ContextLength()), + NumBatch: 512, + NumGPU: -1, // -1 here indicates that NumGPU should be set dynamically + NumThread: 0, // let the runtime decide + UseMMap: nil, + }, + } +} + +type Duration struct { + time.Duration +} + +func (d Duration) MarshalJSON() ([]byte, error) { + if d.Duration < 0 { + return []byte("-1"), nil + } + return []byte("\"" + d.Duration.String() + "\""), nil +} + +func (d *Duration) UnmarshalJSON(b []byte) (err error) { + var v any + if err := json.Unmarshal(b, &v); err != nil { + return err + } + + d.Duration = 5 * time.Minute + + switch t := v.(type) { + case float64: + if t < 0 { + d.Duration = time.Duration(math.MaxInt64) + } else { + d.Duration = time.Duration(int(t) * int(time.Second)) + } + case string: + d.Duration, err = time.ParseDuration(t) + if err != nil { + return err + } + if d.Duration < 0 { + d.Duration = time.Duration(math.MaxInt64) + } + default: + return fmt.Errorf("Unsupported type: '%s'", reflect.TypeOf(v)) + } + + return nil +} + +// FormatParams converts specified parameter options to their correct types +func FormatParams(params map[string][]string) (map[string]any, error) { + opts := Options{} + valueOpts := reflect.ValueOf(&opts).Elem() // names of the fields in the options struct + typeOpts := reflect.TypeOf(opts) // types of the fields in the options struct + + // build map of json struct tags to their types + jsonOpts := make(map[string]reflect.StructField) + for _, field := range reflect.VisibleFields(typeOpts) { + jsonTag := strings.Split(field.Tag.Get("json"), ",")[0] + if jsonTag != "" { + jsonOpts[jsonTag] = field + } + } + + out := 
make(map[string]any) + // iterate params and set values based on json struct tags + for key, vals := range params { + if opt, ok := jsonOpts[key]; !ok { + return nil, fmt.Errorf("unknown parameter '%s'", key) + } else { + field := valueOpts.FieldByName(opt.Name) + if field.IsValid() && field.CanSet() { + switch field.Kind() { + case reflect.Float32: + floatVal, err := strconv.ParseFloat(vals[0], 32) + if err != nil { + return nil, fmt.Errorf("invalid float value %s", vals) + } + + out[key] = float32(floatVal) + case reflect.Int: + intVal, err := strconv.ParseInt(vals[0], 10, 64) + if err != nil { + return nil, fmt.Errorf("invalid int value %s", vals) + } + + out[key] = intVal + case reflect.Bool: + boolVal, err := strconv.ParseBool(vals[0]) + if err != nil { + return nil, fmt.Errorf("invalid bool value %s", vals) + } + + out[key] = boolVal + case reflect.String: + out[key] = vals[0] + case reflect.Slice: + // TODO: only string slices are supported right now + out[key] = vals + case reflect.Pointer: + var b bool + if field.Type() == reflect.TypeOf(&b) { + boolVal, err := strconv.ParseBool(vals[0]) + if err != nil { + return nil, fmt.Errorf("invalid bool value %s", vals) + } + out[key] = &boolVal + } else { + return nil, fmt.Errorf("unknown type %s for %s", field.Kind(), key) + } + default: + return nil, fmt.Errorf("unknown type %s for %s", field.Kind(), key) + } + } + } + } + + return out, nil +} diff --git a/api/types_test.go b/api/types_test.go new file mode 100644 index 0000000..1a6fc81 --- /dev/null +++ b/api/types_test.go @@ -0,0 +1,374 @@ +package api + +import ( + "encoding/json" + "errors" + "math" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestKeepAliveParsingFromJSON(t *testing.T) { + tests := []struct { + name string + req string + exp *Duration + }{ + { + name: "Positive Integer", + req: `{ "keep_alive": 42 }`, + exp: &Duration{42 * time.Second}, + }, + { + name: "Positive Float", + req: `{ "keep_alive": 42.5 }`, + exp: &Duration{42 * time.Second}, + }, + { + name: "Positive Integer String", + req: `{ "keep_alive": "42m" }`, + exp: &Duration{42 * time.Minute}, + }, + { + name: "Negative Integer", + req: `{ "keep_alive": -1 }`, + exp: &Duration{math.MaxInt64}, + }, + { + name: "Negative Float", + req: `{ "keep_alive": -3.14 }`, + exp: &Duration{math.MaxInt64}, + }, + { + name: "Negative Integer String", + req: `{ "keep_alive": "-1m" }`, + exp: &Duration{math.MaxInt64}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + var dec ChatRequest + err := json.Unmarshal([]byte(test.req), &dec) + require.NoError(t, err) + + assert.Equal(t, test.exp, dec.KeepAlive) + }) + } +} + +func TestDurationMarshalUnmarshal(t *testing.T) { + tests := []struct { + name string + input time.Duration + expected time.Duration + }{ + { + "negative duration", + time.Duration(-1), + time.Duration(math.MaxInt64), + }, + { + "positive duration", + 42 * time.Second, + 42 * time.Second, + }, + { + "another positive duration", + 42 * time.Minute, + 42 * time.Minute, + }, + { + "zero duration", + time.Duration(0), + time.Duration(0), + }, + { + "max duration", + time.Duration(math.MaxInt64), + time.Duration(math.MaxInt64), + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + b, err := json.Marshal(Duration{test.input}) + require.NoError(t, err) + + var d Duration + err = json.Unmarshal(b, &d) + require.NoError(t, err) + + assert.Equal(t, test.expected, d.Duration, "input %v, 
marshalled %v, got %v", test.input, string(b), d.Duration) + }) + } +} + +func TestUseMmapParsingFromJSON(t *testing.T) { + tr := true + fa := false + tests := []struct { + name string + req string + exp *bool + }{ + { + name: "Undefined", + req: `{ }`, + exp: nil, + }, + { + name: "True", + req: `{ "use_mmap": true }`, + exp: &tr, + }, + { + name: "False", + req: `{ "use_mmap": false }`, + exp: &fa, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + var oMap map[string]any + err := json.Unmarshal([]byte(test.req), &oMap) + require.NoError(t, err) + opts := DefaultOptions() + err = opts.FromMap(oMap) + require.NoError(t, err) + assert.Equal(t, test.exp, opts.UseMMap) + }) + } +} + +func TestUseMmapFormatParams(t *testing.T) { + tr := true + fa := false + tests := []struct { + name string + req map[string][]string + exp *bool + err error + }{ + { + name: "True", + req: map[string][]string{ + "use_mmap": {"true"}, + }, + exp: &tr, + err: nil, + }, + { + name: "False", + req: map[string][]string{ + "use_mmap": {"false"}, + }, + exp: &fa, + err: nil, + }, + { + name: "Numeric True", + req: map[string][]string{ + "use_mmap": {"1"}, + }, + exp: &tr, + err: nil, + }, + { + name: "Numeric False", + req: map[string][]string{ + "use_mmap": {"0"}, + }, + exp: &fa, + err: nil, + }, + { + name: "invalid string", + req: map[string][]string{ + "use_mmap": {"foo"}, + }, + exp: nil, + err: errors.New("invalid bool value [foo]"), + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + resp, err := FormatParams(test.req) + require.Equal(t, test.err, err) + respVal, ok := resp["use_mmap"] + if test.exp != nil { + assert.True(t, ok, "resp: %v", resp) + assert.Equal(t, *test.exp, *respVal.(*bool)) + } + }) + } +} + +func TestMessage_UnmarshalJSON(t *testing.T) { + tests := []struct { + input string + expected string + }{ + {`{"role": "USER", "content": "Hello!"}`, "user"}, + {`{"role": "System", "content": "Initialization complete."}`, "system"}, + {`{"role": "assistant", "content": "How can I help you?"}`, "assistant"}, + {`{"role": "TOOl", "content": "Access granted."}`, "tool"}, + } + + for _, test := range tests { + var msg Message + if err := json.Unmarshal([]byte(test.input), &msg); err != nil { + t.Errorf("Unexpected error: %v", err) + } + + if msg.Role != test.expected { + t.Errorf("role not lowercased: got %v, expected %v", msg.Role, test.expected) + } + } +} + +func TestToolFunction_UnmarshalJSON(t *testing.T) { + tests := []struct { + name string + input string + wantErr string + }{ + { + name: "valid enum with same types", + input: `{ + "name": "test", + "description": "test function", + "parameters": { + "type": "object", + "required": ["test"], + "properties": { + "test": { + "type": "string", + "description": "test prop", + "enum": ["a", "b", "c"] + } + } + } + }`, + wantErr: "", + }, + { + name: "empty enum array", + input: `{ + "name": "test", + "description": "test function", + "parameters": { + "type": "object", + "required": ["test"], + "properties": { + "test": { + "type": "string", + "description": "test prop", + "enum": [] + } + } + } + }`, + wantErr: "", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var tf ToolFunction + err := json.Unmarshal([]byte(tt.input), &tf) + + if tt.wantErr != "" { + require.Error(t, err) + assert.Contains(t, err.Error(), tt.wantErr) + } else { + require.NoError(t, err) + } + }) + } +} + +func TestPropertyType_UnmarshalJSON(t *testing.T) { + tests := []struct { + name 
string + input string + expected PropertyType + }{ + { + name: "string type", + input: `"string"`, + expected: PropertyType{"string"}, + }, + { + name: "array of types", + input: `["string", "number"]`, + expected: PropertyType{"string", "number"}, + }, + { + name: "array with single type", + input: `["string"]`, + expected: PropertyType{"string"}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + var pt PropertyType + if err := json.Unmarshal([]byte(test.input), &pt); err != nil { + t.Errorf("Unexpected error: %v", err) + } + + if len(pt) != len(test.expected) { + t.Errorf("Length mismatch: got %v, expected %v", len(pt), len(test.expected)) + } + + for i, v := range pt { + if v != test.expected[i] { + t.Errorf("Value mismatch at index %d: got %v, expected %v", i, v, test.expected[i]) + } + } + }) + } +} + +func TestPropertyType_MarshalJSON(t *testing.T) { + tests := []struct { + name string + input PropertyType + expected string + }{ + { + name: "single type", + input: PropertyType{"string"}, + expected: `"string"`, + }, + { + name: "multiple types", + input: PropertyType{"string", "number"}, + expected: `["string","number"]`, + }, + { + name: "empty type", + input: PropertyType{}, + expected: `[]`, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + data, err := json.Marshal(test.input) + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + if string(data) != test.expected { + t.Errorf("Marshaled data mismatch: got %v, expected %v", string(data), test.expected) + } + }) + } +} diff --git a/app/.gitignore b/app/.gitignore new file mode 100644 index 0000000..0aa2479 --- /dev/null +++ b/app/.gitignore @@ -0,0 +1 @@ +ollama.syso diff --git a/app/README.md b/app/README.md new file mode 100644 index 0000000..433ee44 --- /dev/null +++ b/app/README.md @@ -0,0 +1,22 @@ +# Ollama App + +## Linux + +TODO + +## MacOS + +TODO + +## Windows + +If you want to build the installer, youll need to install +- https://jrsoftware.org/isinfo.php + + +In the top directory of this repo, run the following powershell script +to build the ollama CLI, ollama app, and ollama installer. 
+ +```powershell +powershell -ExecutionPolicy Bypass -File .\scripts\build_windows.ps1 +``` diff --git a/app/assets/app.ico b/app/assets/app.ico new file mode 100644 index 0000000..875924f Binary files /dev/null and b/app/assets/app.ico differ diff --git a/app/assets/assets.go b/app/assets/assets.go new file mode 100644 index 0000000..6fed2d0 --- /dev/null +++ b/app/assets/assets.go @@ -0,0 +1,17 @@ +package assets + +import ( + "embed" + "io/fs" +) + +//go:embed *.ico +var icons embed.FS + +func ListIcons() ([]string, error) { + return fs.Glob(icons, "*") +} + +func GetIcon(filename string) ([]byte, error) { + return icons.ReadFile(filename) +} diff --git a/app/assets/setup.bmp b/app/assets/setup.bmp new file mode 100644 index 0000000..ff58b90 Binary files /dev/null and b/app/assets/setup.bmp differ diff --git a/app/assets/tray.ico b/app/assets/tray.ico new file mode 100644 index 0000000..e63616c Binary files /dev/null and b/app/assets/tray.ico differ diff --git a/app/assets/tray_upgrade.ico b/app/assets/tray_upgrade.ico new file mode 100644 index 0000000..d208305 Binary files /dev/null and b/app/assets/tray_upgrade.ico differ diff --git a/app/lifecycle/getstarted_nonwindows.go b/app/lifecycle/getstarted_nonwindows.go new file mode 100644 index 0000000..2af87ab --- /dev/null +++ b/app/lifecycle/getstarted_nonwindows.go @@ -0,0 +1,9 @@ +//go:build !windows + +package lifecycle + +import "errors" + +func GetStarted() error { + return errors.New("not implemented") +} diff --git a/app/lifecycle/getstarted_windows.go b/app/lifecycle/getstarted_windows.go new file mode 100644 index 0000000..f39dc31 --- /dev/null +++ b/app/lifecycle/getstarted_windows.go @@ -0,0 +1,43 @@ +package lifecycle + +import ( + "fmt" + "log/slog" + "os" + "os/exec" + "path/filepath" + "syscall" +) + +func GetStarted() error { + const CREATE_NEW_CONSOLE = 0x00000010 + var err error + bannerScript := filepath.Join(AppDir, "ollama_welcome.ps1") + args := []string{ + // TODO once we're signed, the execution policy bypass should be removed + "powershell", "-noexit", "-ExecutionPolicy", "Bypass", "-nologo", "-file", bannerScript, + } + args[0], err = exec.LookPath(args[0]) + if err != nil { + return err + } + + // Make sure the script actually exists + _, err = os.Stat(bannerScript) + if err != nil { + return fmt.Errorf("getting started banner script error %s", err) + } + + slog.Info(fmt.Sprintf("opening getting started terminal with %v", args)) + attrs := &os.ProcAttr{ + Files: []*os.File{os.Stdin, os.Stdout, os.Stderr}, + Sys: &syscall.SysProcAttr{CreationFlags: CREATE_NEW_CONSOLE, HideWindow: false}, + } + proc, err := os.StartProcess(args[0], args, attrs) + if err != nil { + return fmt.Errorf("unable to start getting started shell %w", err) + } + + slog.Debug(fmt.Sprintf("getting started terminal PID: %d", proc.Pid)) + return proc.Release() +} diff --git a/app/lifecycle/lifecycle.go b/app/lifecycle/lifecycle.go new file mode 100644 index 0000000..c24fe64 --- /dev/null +++ b/app/lifecycle/lifecycle.go @@ -0,0 +1,94 @@ +package lifecycle + +import ( + "context" + "fmt" + "log" + "log/slog" + "os" + "os/signal" + "syscall" + + "github.com/ollama/ollama/app/store" + "github.com/ollama/ollama/app/tray" + "github.com/ollama/ollama/envconfig" +) + +func Run() { + InitLogging() + slog.Info("app config", "env", envconfig.Values()) + + ctx, cancel := context.WithCancel(context.Background()) + var done chan int + + t, err := tray.NewTray() + if err != nil { + log.Fatalf("Failed to start: %s", err) + } + callbacks := t.GetCallbacks() 
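+	// Route SIGINT/SIGTERM through the same shutdown path as the tray's Quit callback (handled in the loop below).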
+ + signals := make(chan os.Signal, 1) + signal.Notify(signals, syscall.SIGINT, syscall.SIGTERM) + + go func() { + slog.Debug("starting callback loop") + for { + select { + case <-callbacks.Quit: + slog.Debug("quit called") + t.Quit() + case <-signals: + slog.Debug("shutting down due to signal") + t.Quit() + case <-callbacks.Update: + err := DoUpgrade(cancel, done) + if err != nil { + slog.Warn(fmt.Sprintf("upgrade attempt failed: %s", err)) + } + case <-callbacks.ShowLogs: + ShowLogs() + case <-callbacks.DoFirstUse: + err := GetStarted() + if err != nil { + slog.Warn(fmt.Sprintf("Failed to launch getting started shell: %s", err)) + } + } + } + }() + + // Are we first use? + if !store.GetFirstTimeRun() { + slog.Debug("First time run") + err = t.DisplayFirstUseNotification() + if err != nil { + slog.Debug(fmt.Sprintf("XXX failed to display first use notification %v", err)) + } + store.SetFirstTimeRun(true) + } else { + slog.Debug("Not first time, skipping first run notification") + } + + if IsServerRunning(ctx) { + slog.Info("Detected another instance of ollama running, exiting") + os.Exit(1) + } else { + done, err = SpawnServer(ctx, CLIName) + if err != nil { + // TODO - should we retry in a backoff loop? + // TODO - should we pop up a warning and maybe add a menu item to view application logs? + slog.Error(fmt.Sprintf("Failed to spawn ollama server %s", err)) + done = make(chan int, 1) + done <- 1 + } + } + + StartBackgroundUpdaterChecker(ctx, t.UpdateAvailable) + + t.Run() + cancel() + slog.Info("Waiting for ollama server to shutdown...") + if done != nil { + <-done + } + slog.Info("Ollama app exiting") +} diff --git a/app/lifecycle/logging.go b/app/lifecycle/logging.go new file mode 100644 index 0000000..22e3de1 --- /dev/null +++ b/app/lifecycle/logging.go @@ -0,0 +1,62 @@ +package lifecycle + +import ( + "fmt" + "log/slog" + "os" + "strconv" + "strings" + + "github.com/ollama/ollama/envconfig" + "github.com/ollama/ollama/logutil" +) + +func InitLogging() { + var logFile *os.File + var err error + // Detect if we're a GUI app on windows, and if not, send logs to console + if os.Stderr.Fd() != 0 { + // Console app detected + logFile = os.Stderr + // TODO - write one-line to the app.log file saying we're running in console mode to help avoid confusion + } else { + rotateLogs(AppLogFile) + logFile, err = os.OpenFile(AppLogFile, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0o755) + if err != nil { + slog.Error(fmt.Sprintf("failed to create server log %v", err)) + return + } + } + + slog.SetDefault(logutil.NewLogger(logFile, envconfig.LogLevel())) + slog.Info("ollama app started") +} + +func rotateLogs(logFile string) { + if _, err := os.Stat(logFile); os.IsNotExist(err) { + return + } + index := strings.LastIndex(logFile, ".") + pre := logFile[:index] + post := "." 
+ logFile[index+1:] + for i := LogRotationCount; i > 0; i-- { + older := pre + "-" + strconv.Itoa(i) + post + newer := pre + "-" + strconv.Itoa(i-1) + post + if i == 1 { + newer = pre + post + } + if _, err := os.Stat(newer); err == nil { + if _, err := os.Stat(older); err == nil { + err := os.Remove(older) + if err != nil { + slog.Warn("Failed to remove older log", "older", older, "error", err) + continue + } + } + err := os.Rename(newer, older) + if err != nil { + slog.Warn("Failed to rotate log", "older", older, "newer", newer, "error", err) + } + } + } +} diff --git a/app/lifecycle/logging_nonwindows.go b/app/lifecycle/logging_nonwindows.go new file mode 100644 index 0000000..205e47d --- /dev/null +++ b/app/lifecycle/logging_nonwindows.go @@ -0,0 +1,9 @@ +//go:build !windows + +package lifecycle + +import "log/slog" + +func ShowLogs() { + slog.Warn("not implemented") +} diff --git a/app/lifecycle/logging_test.go b/app/lifecycle/logging_test.go new file mode 100644 index 0000000..8d5cdf6 --- /dev/null +++ b/app/lifecycle/logging_test.go @@ -0,0 +1,44 @@ +package lifecycle + +import ( + "os" + "path/filepath" + "strconv" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestRotateLogs(t *testing.T) { + logDir := t.TempDir() + logFile := filepath.Join(logDir, "testlog.log") + + // No log exists + rotateLogs(logFile) + + require.NoError(t, os.WriteFile(logFile, []byte("1"), 0o644)) + assert.FileExists(t, logFile) + // First rotation + rotateLogs(logFile) + assert.FileExists(t, filepath.Join(logDir, "testlog-1.log")) + assert.NoFileExists(t, filepath.Join(logDir, "testlog-2.log")) + assert.NoFileExists(t, logFile) + + // Should be a no-op without a new log + rotateLogs(logFile) + assert.FileExists(t, filepath.Join(logDir, "testlog-1.log")) + assert.NoFileExists(t, filepath.Join(logDir, "testlog-2.log")) + assert.NoFileExists(t, logFile) + + for i := 2; i <= LogRotationCount+1; i++ { + require.NoError(t, os.WriteFile(logFile, []byte(strconv.Itoa(i)), 0o644)) + assert.FileExists(t, logFile) + rotateLogs(logFile) + assert.NoFileExists(t, logFile) + for j := 1; j < i; j++ { + assert.FileExists(t, filepath.Join(logDir, "testlog-"+strconv.Itoa(j)+".log")) + } + assert.NoFileExists(t, filepath.Join(logDir, "testlog-"+strconv.Itoa(i+1)+".log")) + } +} diff --git a/app/lifecycle/logging_windows.go b/app/lifecycle/logging_windows.go new file mode 100644 index 0000000..8f20337 --- /dev/null +++ b/app/lifecycle/logging_windows.go @@ -0,0 +1,19 @@ +package lifecycle + +import ( + "fmt" + "log/slog" + "os/exec" + "syscall" +) + +func ShowLogs() { + cmd_path := "c:\\Windows\\system32\\cmd.exe" + slog.Debug(fmt.Sprintf("viewing logs with start %s", AppDataDir)) + cmd := exec.Command(cmd_path, "/c", "start", AppDataDir) + cmd.SysProcAttr = &syscall.SysProcAttr{HideWindow: false, CreationFlags: 0x08000000} + err := cmd.Start() + if err != nil { + slog.Error(fmt.Sprintf("Failed to open log dir: %s", err)) + } +} diff --git a/app/lifecycle/paths.go b/app/lifecycle/paths.go new file mode 100644 index 0000000..42ae826 --- /dev/null +++ b/app/lifecycle/paths.go @@ -0,0 +1,84 @@ +package lifecycle + +import ( + "errors" + "fmt" + "log/slog" + "os" + "path/filepath" + "runtime" + "strings" +) + +var ( + AppName = "ollama app" + CLIName = "ollama" + AppDir = "/opt/Ollama" + AppDataDir = "/opt/Ollama" + // TODO - should there be a distinct log dir? 
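+	// These defaults apply to non-Windows platforms; init() below overrides the paths on Windows using LOCALAPPDATA.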
+ UpdateStageDir = "/tmp" + AppLogFile = "/tmp/ollama_app.log" + ServerLogFile = "/tmp/ollama.log" + UpgradeLogFile = "/tmp/ollama_update.log" + Installer = "OllamaSetup.exe" + LogRotationCount = 5 +) + +func init() { + if runtime.GOOS == "windows" { + AppName += ".exe" + CLIName += ".exe" + // Logs, configs, downloads go to LOCALAPPDATA + localAppData := os.Getenv("LOCALAPPDATA") + AppDataDir = filepath.Join(localAppData, "Ollama") + UpdateStageDir = filepath.Join(AppDataDir, "updates") + AppLogFile = filepath.Join(AppDataDir, "app.log") + ServerLogFile = filepath.Join(AppDataDir, "server.log") + UpgradeLogFile = filepath.Join(AppDataDir, "upgrade.log") + + exe, err := os.Executable() + if err != nil { + slog.Warn("error discovering executable directory", "error", err) + AppDir = filepath.Join(localAppData, "Programs", "Ollama") + } else { + AppDir = filepath.Dir(exe) + } + + // Make sure we have PATH set correctly for any spawned children + paths := strings.Split(os.Getenv("PATH"), ";") + // Start with whatever we find in the PATH/LD_LIBRARY_PATH + found := false + for _, path := range paths { + d, err := filepath.Abs(path) + if err != nil { + continue + } + if strings.EqualFold(AppDir, d) { + found = true + } + } + if !found { + paths = append(paths, AppDir) + + pathVal := strings.Join(paths, ";") + slog.Debug("setting PATH=" + pathVal) + err := os.Setenv("PATH", pathVal) + if err != nil { + slog.Error(fmt.Sprintf("failed to update PATH: %s", err)) + } + } + + // Make sure our logging dir exists + _, err = os.Stat(AppDataDir) + if errors.Is(err, os.ErrNotExist) { + if err := os.MkdirAll(AppDataDir, 0o755); err != nil { + slog.Error(fmt.Sprintf("create ollama dir %s: %v", AppDataDir, err)) + } + } + } else if runtime.GOOS == "darwin" { + // TODO + AppName += ".app" + // } else if runtime.GOOS == "linux" { + // TODO + } +} diff --git a/app/lifecycle/server.go b/app/lifecycle/server.go new file mode 100644 index 0000000..f7aa202 --- /dev/null +++ b/app/lifecycle/server.go @@ -0,0 +1,186 @@ +package lifecycle + +import ( + "context" + "errors" + "fmt" + "io" + "log/slog" + "os" + "os/exec" + "path/filepath" + "time" + + "github.com/ollama/ollama/api" +) + +func getCLIFullPath(command string) string { + var cmdPath string + appExe, err := os.Executable() + if err == nil { + // Check both the same location as the tray app, as well as ./bin + cmdPath = filepath.Join(filepath.Dir(appExe), command) + _, err := os.Stat(cmdPath) + if err == nil { + return cmdPath + } + cmdPath = filepath.Join(filepath.Dir(appExe), "bin", command) + _, err = os.Stat(cmdPath) + if err == nil { + return cmdPath + } + } + cmdPath, err = exec.LookPath(command) + if err == nil { + _, err := os.Stat(cmdPath) + if err == nil { + return cmdPath + } + } + pwd, err := os.Getwd() + if err == nil { + cmdPath = filepath.Join(pwd, command) + _, err = os.Stat(cmdPath) + if err == nil { + return cmdPath + } + } + + return command +} + +func start(ctx context.Context, command string) (*exec.Cmd, error) { + cmd := getCmd(ctx, getCLIFullPath(command)) + stdout, err := cmd.StdoutPipe() + if err != nil { + return nil, fmt.Errorf("failed to spawn server stdout pipe: %w", err) + } + stderr, err := cmd.StderrPipe() + if err != nil { + return nil, fmt.Errorf("failed to spawn server stderr pipe: %w", err) + } + + rotateLogs(ServerLogFile) + logFile, err := os.OpenFile(ServerLogFile, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0o755) + if err != nil { + return nil, fmt.Errorf("failed to create server log: %w", err) + } + + logDir := 
filepath.Dir(ServerLogFile) + _, err = os.Stat(logDir) + if err != nil { + if !errors.Is(err, os.ErrNotExist) { + return nil, fmt.Errorf("stat ollama server log dir %s: %v", logDir, err) + } + + if err := os.MkdirAll(logDir, 0o755); err != nil { + return nil, fmt.Errorf("create ollama server log dir %s: %v", logDir, err) + } + } + + go func() { + defer logFile.Close() + io.Copy(logFile, stdout) //nolint:errcheck + }() + go func() { + defer logFile.Close() + io.Copy(logFile, stderr) //nolint:errcheck + }() + + // Re-wire context done behavior to attempt a graceful shutdown of the server + cmd.Cancel = func() error { + if cmd.Process != nil { + err := terminate(cmd) + if err != nil { + slog.Warn("error trying to gracefully terminate server", "err", err) + return cmd.Process.Kill() + } + + tick := time.NewTicker(10 * time.Millisecond) + defer tick.Stop() + + for { + select { + case <-tick.C: + exited, err := isProcessExited(cmd.Process.Pid) + if err != nil { + return err + } + + if exited { + return nil + } + case <-time.After(5 * time.Second): + slog.Warn("graceful server shutdown timeout, killing", "pid", cmd.Process.Pid) + return cmd.Process.Kill() + } + } + } + return nil + } + + // run the command and wait for it to finish + if err := cmd.Start(); err != nil { + return nil, fmt.Errorf("failed to start server %w", err) + } + if cmd.Process != nil { + slog.Info(fmt.Sprintf("started ollama server with pid %d", cmd.Process.Pid)) + } + slog.Info(fmt.Sprintf("ollama server logs %s", ServerLogFile)) + + return cmd, nil +} + +func SpawnServer(ctx context.Context, command string) (chan int, error) { + done := make(chan int) + + go func() { + // Keep the server running unless we're shuttind down the app + crashCount := 0 + for { + slog.Info("starting server...") + cmd, err := start(ctx, command) + if err != nil { + crashCount++ + slog.Error(fmt.Sprintf("failed to start server %s", err)) + time.Sleep(500 * time.Millisecond * time.Duration(crashCount)) + continue + } + + cmd.Wait() //nolint:errcheck + var code int + if cmd.ProcessState != nil { + code = cmd.ProcessState.ExitCode() + } + + select { + case <-ctx.Done(): + slog.Info(fmt.Sprintf("server shutdown with exit code %d", code)) + done <- code + return + default: + crashCount++ + slog.Warn(fmt.Sprintf("server crash %d - exit code %d - respawning", crashCount, code)) + time.Sleep(500 * time.Millisecond * time.Duration(crashCount)) + break + } + } + }() + + return done, nil +} + +func IsServerRunning(ctx context.Context) bool { + client, err := api.ClientFromEnvironment() + if err != nil { + slog.Info("unable to connect to server") + return false + } + err = client.Heartbeat(ctx) + if err != nil { + slog.Debug(fmt.Sprintf("heartbeat from server: %s", err)) + slog.Info("unable to connect to server") + return false + } + return true +} diff --git a/app/lifecycle/server_unix.go b/app/lifecycle/server_unix.go new file mode 100644 index 0000000..7057391 --- /dev/null +++ b/app/lifecycle/server_unix.go @@ -0,0 +1,38 @@ +//go:build !windows + +package lifecycle + +import ( + "context" + "errors" + "fmt" + "os" + "os/exec" + "syscall" +) + +func getCmd(ctx context.Context, cmd string) *exec.Cmd { + return exec.CommandContext(ctx, cmd, "serve") +} + +func terminate(cmd *exec.Cmd) error { + return cmd.Process.Signal(os.Interrupt) +} + +func isProcessExited(pid int) (bool, error) { + proc, err := os.FindProcess(pid) + if err != nil { + return false, fmt.Errorf("failed to find process: %v", err) + } + + err = proc.Signal(syscall.Signal(0)) + if err != nil { 
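+		// Signal 0 performs error checking only; ESRCH or ErrProcessDone means the PID no longer exists.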
+ if errors.Is(err, os.ErrProcessDone) || errors.Is(err, syscall.ESRCH) { + return true, nil + } + + return false, fmt.Errorf("error signaling process: %v", err) + } + + return false, nil +} diff --git a/app/lifecycle/server_windows.go b/app/lifecycle/server_windows.go new file mode 100644 index 0000000..5f9fe12 --- /dev/null +++ b/app/lifecycle/server_windows.go @@ -0,0 +1,91 @@ +package lifecycle + +import ( + "context" + "fmt" + "os/exec" + "syscall" + + "golang.org/x/sys/windows" +) + +func getCmd(ctx context.Context, exePath string) *exec.Cmd { + cmd := exec.CommandContext(ctx, exePath, "serve") + cmd.SysProcAttr = &syscall.SysProcAttr{ + HideWindow: true, + CreationFlags: windows.CREATE_NEW_PROCESS_GROUP, + } + + return cmd +} + +func terminate(cmd *exec.Cmd) error { + dll, err := windows.LoadDLL("kernel32.dll") + if err != nil { + return err + } + //nolint:errcheck + defer dll.Release() + + pid := cmd.Process.Pid + + f, err := dll.FindProc("AttachConsole") + if err != nil { + return err + } + + r1, _, err := f.Call(uintptr(pid)) + if r1 == 0 && err != syscall.ERROR_ACCESS_DENIED { + return err + } + + f, err = dll.FindProc("SetConsoleCtrlHandler") + if err != nil { + return err + } + + r1, _, err = f.Call(0, 1) + if r1 == 0 { + return err + } + + f, err = dll.FindProc("GenerateConsoleCtrlEvent") + if err != nil { + return err + } + + r1, _, err = f.Call(windows.CTRL_BREAK_EVENT, uintptr(pid)) + if r1 == 0 { + return err + } + + r1, _, err = f.Call(windows.CTRL_C_EVENT, uintptr(pid)) + if r1 == 0 { + return err + } + + return nil +} + +const STILL_ACTIVE = 259 + +func isProcessExited(pid int) (bool, error) { + hProcess, err := windows.OpenProcess(windows.PROCESS_QUERY_INFORMATION, false, uint32(pid)) + if err != nil { + return false, fmt.Errorf("failed to open process: %v", err) + } + //nolint:errcheck + defer windows.CloseHandle(hProcess) + + var exitCode uint32 + err = windows.GetExitCodeProcess(hProcess, &exitCode) + if err != nil { + return false, fmt.Errorf("failed to get exit code: %v", err) + } + + if exitCode == STILL_ACTIVE { + return false, nil + } + + return true, nil +} diff --git a/app/lifecycle/updater.go b/app/lifecycle/updater.go new file mode 100644 index 0000000..4d3c7d8 --- /dev/null +++ b/app/lifecycle/updater.go @@ -0,0 +1,229 @@ +package lifecycle + +import ( + "context" + "crypto/rand" + "encoding/json" + "errors" + "fmt" + "io" + "log/slog" + "mime" + "net/http" + "net/url" + "os" + "path" + "path/filepath" + "runtime" + "strconv" + "strings" + "time" + + "github.com/ollama/ollama/auth" + "github.com/ollama/ollama/version" +) + +var ( + UpdateCheckURLBase = "https://ollama.com/api/update" + UpdateDownloaded = false + UpdateCheckInterval = 60 * 60 * time.Second +) + +// TODO - maybe move up to the API package? 
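+// UpdateResponse describes an available release as returned by the update check endpoint.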
+type UpdateResponse struct { + UpdateURL string `json:"url"` + UpdateVersion string `json:"version"` +} + +func IsNewReleaseAvailable(ctx context.Context) (bool, UpdateResponse) { + var updateResp UpdateResponse + + requestURL, err := url.Parse(UpdateCheckURLBase) + if err != nil { + return false, updateResp + } + + query := requestURL.Query() + query.Add("os", runtime.GOOS) + query.Add("arch", runtime.GOARCH) + query.Add("version", version.Version) + query.Add("ts", strconv.FormatInt(time.Now().Unix(), 10)) + + nonce, err := auth.NewNonce(rand.Reader, 16) + if err != nil { + return false, updateResp + } + + query.Add("nonce", nonce) + requestURL.RawQuery = query.Encode() + + data := []byte(fmt.Sprintf("%s,%s", http.MethodGet, requestURL.RequestURI())) + signature, err := auth.Sign(ctx, data) + if err != nil { + return false, updateResp + } + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, requestURL.String(), nil) + if err != nil { + slog.Warn(fmt.Sprintf("failed to check for update: %s", err)) + return false, updateResp + } + req.Header.Set("Authorization", signature) + req.Header.Set("User-Agent", fmt.Sprintf("ollama/%s (%s %s) Go/%s", version.Version, runtime.GOARCH, runtime.GOOS, runtime.Version())) + + slog.Debug("checking for available update", "requestURL", requestURL) + resp, err := http.DefaultClient.Do(req) + if err != nil { + slog.Warn(fmt.Sprintf("failed to check for update: %s", err)) + return false, updateResp + } + defer resp.Body.Close() + + if resp.StatusCode == http.StatusNoContent { + slog.Debug("check update response 204 (current version is up to date)") + return false, updateResp + } + body, err := io.ReadAll(resp.Body) + if err != nil { + slog.Warn(fmt.Sprintf("failed to read body response: %s", err)) + } + + if resp.StatusCode != http.StatusOK { + slog.Info(fmt.Sprintf("check update error %d - %.96s", resp.StatusCode, string(body))) + return false, updateResp + } + err = json.Unmarshal(body, &updateResp) + if err != nil { + slog.Warn(fmt.Sprintf("malformed response checking for update: %s", err)) + return false, updateResp + } + // Extract the version string from the URL in the github release artifact path + updateResp.UpdateVersion = path.Base(path.Dir(updateResp.UpdateURL)) + + slog.Info("New update available at " + updateResp.UpdateURL) + return true, updateResp +} + +func DownloadNewRelease(ctx context.Context, updateResp UpdateResponse) error { + // Do a head first to check etag info + req, err := http.NewRequestWithContext(ctx, http.MethodHead, updateResp.UpdateURL, nil) + if err != nil { + return err + } + + resp, err := http.DefaultClient.Do(req) + if err != nil { + return fmt.Errorf("error checking update: %w", err) + } + if resp.StatusCode != http.StatusOK { + return fmt.Errorf("unexpected status attempting to download update %d", resp.StatusCode) + } + resp.Body.Close() + etag := strings.Trim(resp.Header.Get("etag"), "\"") + if etag == "" { + slog.Debug("no etag detected, falling back to filename based dedup") + etag = "_" + } + filename := Installer + _, params, err := mime.ParseMediaType(resp.Header.Get("content-disposition")) + if err == nil { + filename = params["filename"] + } + + stageFilename := filepath.Join(UpdateStageDir, etag, filename) + + // Check to see if we already have it downloaded + _, err = os.Stat(stageFilename) + if err == nil { + slog.Info("update already downloaded") + return nil + } + + cleanupOldDownloads() + + req.Method = http.MethodGet + resp, err = http.DefaultClient.Do(req) + if err != nil { + return 
fmt.Errorf("error checking update: %w", err) + } + defer resp.Body.Close() + etag = strings.Trim(resp.Header.Get("etag"), "\"") + if etag == "" { + slog.Debug("no etag detected, falling back to filename based dedup") // TODO probably can get rid of this redundant log + etag = "_" + } + + stageFilename = filepath.Join(UpdateStageDir, etag, filename) + + _, err = os.Stat(filepath.Dir(stageFilename)) + if errors.Is(err, os.ErrNotExist) { + if err := os.MkdirAll(filepath.Dir(stageFilename), 0o755); err != nil { + return fmt.Errorf("create ollama dir %s: %v", filepath.Dir(stageFilename), err) + } + } + + payload, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to read body response: %w", err) + } + fp, err := os.OpenFile(stageFilename, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0o755) + if err != nil { + return fmt.Errorf("write payload %s: %w", stageFilename, err) + } + defer fp.Close() + if n, err := fp.Write(payload); err != nil || n != len(payload) { + return fmt.Errorf("write payload %s: %d vs %d -- %w", stageFilename, n, len(payload), err) + } + slog.Info("new update downloaded " + stageFilename) + + UpdateDownloaded = true + return nil +} + +func cleanupOldDownloads() { + files, err := os.ReadDir(UpdateStageDir) + if err != nil && errors.Is(err, os.ErrNotExist) { + // Expected behavior on first run + return + } else if err != nil { + slog.Warn(fmt.Sprintf("failed to list stage dir: %s", err)) + return + } + for _, file := range files { + fullname := filepath.Join(UpdateStageDir, file.Name()) + slog.Debug("cleaning up old download: " + fullname) + err = os.RemoveAll(fullname) + if err != nil { + slog.Warn(fmt.Sprintf("failed to cleanup stale update download %s", err)) + } + } +} + +func StartBackgroundUpdaterChecker(ctx context.Context, cb func(string) error) { + go func() { + // Don't blast an update message immediately after startup + // time.Sleep(30 * time.Second) + time.Sleep(3 * time.Second) + + for { + available, resp := IsNewReleaseAvailable(ctx) + if available { + err := DownloadNewRelease(ctx, resp) + if err != nil { + slog.Error(fmt.Sprintf("failed to download new release: %s", err)) + } + err = cb(resp.UpdateVersion) + if err != nil { + slog.Warn(fmt.Sprintf("failed to register update available with tray: %s", err)) + } + } + select { + case <-ctx.Done(): + slog.Debug("stopping background update checker") + return + default: + time.Sleep(UpdateCheckInterval) + } + } + }() +} diff --git a/app/lifecycle/updater_nonwindows.go b/app/lifecycle/updater_nonwindows.go new file mode 100644 index 0000000..1d2dda8 --- /dev/null +++ b/app/lifecycle/updater_nonwindows.go @@ -0,0 +1,12 @@ +//go:build !windows + +package lifecycle + +import ( + "context" + "errors" +) + +func DoUpgrade(cancel context.CancelFunc, done chan int) error { + return errors.New("not implemented") +} diff --git a/app/lifecycle/updater_windows.go b/app/lifecycle/updater_windows.go new file mode 100644 index 0000000..293dd60 --- /dev/null +++ b/app/lifecycle/updater_windows.go @@ -0,0 +1,74 @@ +package lifecycle + +import ( + "context" + "errors" + "fmt" + "log/slog" + "os" + "os/exec" + "path/filepath" +) + +func DoUpgrade(cancel context.CancelFunc, done chan int) error { + files, err := filepath.Glob(filepath.Join(UpdateStageDir, "*", "*.exe")) // TODO generalize for multiplatform + if err != nil { + return fmt.Errorf("failed to lookup downloads: %s", err) + } + if len(files) == 0 { + return errors.New("no update downloads found") + } else if len(files) > 1 { + // Shouldn't happen + 
slog.Warn(fmt.Sprintf("multiple downloads found, using first one %v", files)) + } + installerExe := files[0] + + slog.Info("starting upgrade with " + installerExe) + slog.Info("upgrade log file " + UpgradeLogFile) + + // make the upgrade show progress, but non interactive + installArgs := []string{ + "/CLOSEAPPLICATIONS", // Quit the tray app if it's still running + "/LOG=" + filepath.Base(UpgradeLogFile), // Only relative seems reliable, so set pwd + "/FORCECLOSEAPPLICATIONS", // Force close the tray app - might be needed + "/SP", // Skip the "This will install... Do you wish to continue" prompt + "/NOCANCEL", // Disable the ability to cancel upgrade mid-flight to avoid partially installed upgrades + "/SILENT", + } + + // Safeguard in case we have requests in flight that need to drain... + slog.Info("Waiting for server to shutdown") + cancel() + if done != nil { + <-done + } else { + // Shouldn't happen + slog.Warn("done chan was nil, not actually waiting") + } + + slog.Debug(fmt.Sprintf("starting installer: %s %v", installerExe, installArgs)) + os.Chdir(filepath.Dir(UpgradeLogFile)) //nolint:errcheck + cmd := exec.Command(installerExe, installArgs...) + + if err := cmd.Start(); err != nil { + return fmt.Errorf("unable to start ollama app %w", err) + } + + if cmd.Process != nil { + err = cmd.Process.Release() + if err != nil { + slog.Error(fmt.Sprintf("failed to release server process: %s", err)) + } + } else { + // TODO - some details about why it didn't start, or is this a pedantic error case? + return errors.New("installer process did not start") + } + + // TODO should we linger for a moment and check to make sure it's actually running by checking the pid? + + slog.Info("Installer started in background, exiting") + + os.Exit(0) + // Not reached + return nil +} diff --git a/app/main.go b/app/main.go new file mode 100644 index 0000000..db82979 --- /dev/null +++ b/app/main.go @@ -0,0 +1,12 @@ +package main + +// Compile with the following to get rid of the cmd pop up on windows +// go build -ldflags="-H windowsgui" . + +import ( + "github.com/ollama/ollama/app/lifecycle" +) + +func main() { + lifecycle.Run() +} diff --git a/app/ollama.iss b/app/ollama.iss new file mode 100644 index 0000000..d575fc7 --- /dev/null +++ b/app/ollama.iss @@ -0,0 +1,204 @@ +; Inno Setup Installer for Ollama +; +; To build the installer use the build script invoked from the top of the source tree +; +; powershell -ExecutionPolicy Bypass -File .\scripts\build_windows.ps + + +#define MyAppName "Ollama" +#if GetEnv("PKG_VERSION") != "" + #define MyAppVersion GetEnv("PKG_VERSION") +#else + #define MyAppVersion "0.0.0" +#endif +#define MyAppPublisher "Ollama" +#define MyAppURL "https://ollama.com/" +#define MyAppExeName "ollama app.exe" +#define MyIcon ".\assets\app.ico" + +[Setup] +; NOTE: The value of AppId uniquely identifies this application. Do not use the same AppId value in installers for other applications. +; (To generate a new GUID, click Tools | Generate GUID inside the IDE.) 
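+; The doubled opening brace escapes a literal "{" so the GUID is not interpreted as an Inno Setup constant.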
+AppId={{44E83376-CE68-45EB-8FC1-393500EB558C} +AppName={#MyAppName} +AppVersion={#MyAppVersion} +VersionInfoVersion={#MyAppVersion} +;AppVerName={#MyAppName} {#MyAppVersion} +AppPublisher={#MyAppPublisher} +AppPublisherURL={#MyAppURL} +AppSupportURL={#MyAppURL} +AppUpdatesURL={#MyAppURL} +ArchitecturesAllowed=x64compatible arm64 +ArchitecturesInstallIn64BitMode=x64compatible arm64 +DefaultDirName={localappdata}\Programs\{#MyAppName} +DefaultGroupName={#MyAppName} +DisableProgramGroupPage=yes +PrivilegesRequired=lowest +OutputBaseFilename="OllamaSetup" +SetupIconFile={#MyIcon} +UninstallDisplayIcon={uninstallexe} +Compression=lzma2 +SolidCompression=no +WizardStyle=modern +ChangesEnvironment=yes +OutputDir=..\dist\ + +; Disable logging once everything's battle tested +; Filename will be %TEMP%\Setup Log*.txt +SetupLogging=yes +CloseApplications=yes +RestartApplications=no +RestartIfNeededByRun=no + +; https://jrsoftware.org/ishelp/index.php?topic=setup_wizardimagefile +WizardSmallImageFile=.\assets\setup.bmp + +; Ollama requires Windows 10 22H2 or newer for proper unicode rendering +; TODO: consider setting this to 10.0.19045 +MinVersion=10.0.10240 + +; First release that supports WinRT UI Composition for win32 apps +; MinVersion=10.0.17134 +; First release with XAML Islands - possible UI path forward +; MinVersion=10.0.18362 + +; quiet... +DisableDirPage=yes +DisableFinishedPage=yes +DisableReadyMemo=yes +DisableReadyPage=yes +DisableStartupPrompt=yes +DisableWelcomePage=yes + +; TODO - percentage can't be set less than 100, so how to make it shorter? +; WizardSizePercent=100,80 + +#if GetEnv("KEY_CONTAINER") +SignTool=MySignTool +SignedUninstaller=yes +#endif + +SetupMutex=OllamaSetupMutex + +[Languages] +Name: "english"; MessagesFile: "compiler:Default.isl" + +[LangOptions] +DialogFontSize=12 + +[Files] +#if DirExists("..\dist\windows-amd64") +Source: "..\dist\windows-amd64-app.exe"; DestDir: "{app}"; DestName: "{#MyAppExeName}" ;Check: not IsArm64(); Flags: ignoreversion 64bit +Source: "..\dist\windows-amd64\ollama.exe"; DestDir: "{app}"; Check: not IsArm64(); Flags: ignoreversion 64bit +Source: "..\dist\windows-amd64\lib\ollama\*"; DestDir: "{app}\lib\ollama\"; Check: not IsArm64(); Flags: ignoreversion 64bit recursesubdirs +#endif + +#if DirExists("..\dist\windows-arm64") +Source: "..\dist\windows-arm64\vc_redist.arm64.exe"; DestDir: "{tmp}"; Check: IsArm64() and vc_redist_needed(); Flags: deleteafterinstall +Source: "..\dist\windows-arm64-app.exe"; DestDir: "{app}"; DestName: "{#MyAppExeName}" ;Check: IsArm64(); Flags: ignoreversion 64bit +Source: "..\dist\windows-arm64\ollama.exe"; DestDir: "{app}"; Check: IsArm64(); Flags: ignoreversion 64bit +#endif + +Source: "..\dist\ollama_welcome.ps1"; DestDir: "{app}"; Flags: ignoreversion +Source: ".\assets\app.ico"; DestDir: "{app}"; Flags: ignoreversion + +[Icons] +Name: "{group}\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}"; IconFilename: "{app}\app.ico" +Name: "{userstartup}\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}"; IconFilename: "{app}\app.ico" +Name: "{userprograms}\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}"; IconFilename: "{app}\app.ico" + +[Run] +#if DirExists("..\dist\windows-arm64") +Filename: "{tmp}\vc_redist.arm64.exe"; Parameters: "/install /passive /norestart"; Check: IsArm64() and vc_redist_needed(); StatusMsg: "Installing VC++ Redistributables..."; Flags: waituntilterminated +#endif +Filename: "{cmd}"; Parameters: "/C set PATH={app};%PATH% & ""{app}\{#MyAppExeName}"""; Flags: postinstall nowait runhidden + 
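+; The postinstall [Run] entry above goes through cmd so it can prepend {app} to PATH for that session before launching the tray app; the persistent PATH change is handled separately in the [Registry] section below.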
+[UninstallRun] +; Filename: "{cmd}"; Parameters: "/C ""taskkill /im ''{#MyAppExeName}'' /f /t"; Flags: runhidden +; Filename: "{cmd}"; Parameters: "/C ""taskkill /im ollama.exe /f /t"; Flags: runhidden +Filename: "taskkill"; Parameters: "/im ""{#MyAppExeName}"" /f /t"; Flags: runhidden +Filename: "taskkill"; Parameters: "/im ""ollama.exe"" /f /t"; Flags: runhidden +; HACK! need to give the server and app enough time to exit +; TODO - convert this to a Pascal code script so it waits until they're no longer running, then completes +Filename: "{cmd}"; Parameters: "/c timeout 5"; Flags: runhidden + +[UninstallDelete] +Type: filesandordirs; Name: "{%TEMP}\ollama*" +Type: filesandordirs; Name: "{%LOCALAPPDATA}\Ollama" +Type: filesandordirs; Name: "{%LOCALAPPDATA}\Programs\Ollama" +Type: filesandordirs; Name: "{%USERPROFILE}\.ollama\models" +Type: filesandordirs; Name: "{%USERPROFILE}\.ollama\history" +; NOTE: if the user has a custom OLLAMA_MODELS it will be preserved + +[InstallDelete] +Type: filesandordirs; Name: "{%TEMP}\ollama*" +Type: filesandordirs; Name: "{%LOCALAPPDATA}\Programs\Ollama" + +[Messages] +WizardReady=Ollama +ReadyLabel1=%nLet's get you up and running with your own large language models. +SetupAppRunningError=Another Ollama installer is running.%n%nPlease cancel or finish the other installer, then click OK to continue with this install, or Cancel to exit. + + +;FinishedHeadingLabel=Run your first model +;FinishedLabel=%nRun this command in a PowerShell or cmd terminal.%n%n%n ollama run llama3.2 +;ClickFinish=%n + +[Registry] +Root: HKCU; Subkey: "Environment"; \ + ValueType: expandsz; ValueName: "Path"; ValueData: "{olddata};{app}"; \ + Check: NeedsAddPath('{app}') + +[Code] + +function NeedsAddPath(Param: string): boolean; +var + OrigPath: string; +begin + if not RegQueryStringValue(HKEY_CURRENT_USER, + 'Environment', + 'Path', OrigPath) + then begin + Result := True; + exit; + end; + { look for the path with leading and trailing semicolon } + { Pos() returns 0 if not found } + Result := Pos(';' + ExpandConstant(Param) + ';', ';' + OrigPath + ';') = 0; +end; + +{ --- VC Runtime libraries discovery code - Only install vc_redist if it isn't already installed ----- } +const VCRTL_MIN_V1 = 14; +const VCRTL_MIN_V2 = 40; +const VCRTL_MIN_V3 = 33807; +const VCRTL_MIN_V4 = 0; + + // check if the minimum required vc redist is installed (by looking the registry) +function vc_redist_needed (): Boolean; +var + sRegKey: string; + v1: Cardinal; + v2: Cardinal; + v3: Cardinal; + v4: Cardinal; +begin + sRegKey := 'SOFTWARE\WOW6432Node\Microsoft\VisualStudio\14.0\VC\Runtimes\arm64'; + if (RegQueryDWordValue (HKEY_LOCAL_MACHINE, sRegKey, 'Major', v1) and + RegQueryDWordValue (HKEY_LOCAL_MACHINE, sRegKey, 'Minor', v2) and + RegQueryDWordValue (HKEY_LOCAL_MACHINE, sRegKey, 'Bld', v3) and + RegQueryDWordValue (HKEY_LOCAL_MACHINE, sRegKey, 'RBld', v4)) then + begin + Log ('VC Redist version: ' + IntToStr (v1) + + '.' + IntToStr (v2) + '.' + IntToStr (v3) + + '.' + IntToStr (v4)); + { Version info was found. 
Return true if later or equal to our + minimal required version RTL_MIN_Vx } + Result := not ( + (v1 > VCRTL_MIN_V1) or ((v1 = VCRTL_MIN_V1) and + ((v2 > VCRTL_MIN_V2) or ((v2 = VCRTL_MIN_V2) and + ((v3 > VCRTL_MIN_V3) or ((v3 = VCRTL_MIN_V3) and + (v4 >= VCRTL_MIN_V4))))))); + end + else + Result := TRUE; +end; diff --git a/app/ollama.rc b/app/ollama.rc new file mode 100644 index 0000000..acd8449 --- /dev/null +++ b/app/ollama.rc @@ -0,0 +1,29 @@ +#include + +VS_VERSION_INFO VERSIONINFO + FILEFLAGSMASK 0x3fL +#ifdef _DEBUG + FILEFLAGS 0x1L +#else + FILEFLAGS 0x0L +#endif + FILEOS 0x40004L + FILETYPE 0x1L + FILESUBTYPE 0x0L +BEGIN + BLOCK "StringFileInfo" + BEGIN + BLOCK "040904b0" + BEGIN + VALUE "FileDescription", "Ollama" + VALUE "InternalName", "Ollama" + VALUE "OriginalFilename", "ollama app.exe" + VALUE "ProductName", "Ollama" + END + END + + BLOCK "VarFileInfo" + BEGIN + VALUE "Translation", 0x409, 1200 + END +END diff --git a/app/ollama_welcome.ps1 b/app/ollama_welcome.ps1 new file mode 100644 index 0000000..e969574 --- /dev/null +++ b/app/ollama_welcome.ps1 @@ -0,0 +1,8 @@ +# TODO - consider ANSI colors and maybe ASCII art... +write-host "" +write-host "Welcome to Ollama!" +write-host "" +write-host "Run your first model:" +write-host "" +write-host "`tollama run llama3.2" +write-host "" \ No newline at end of file diff --git a/app/store/store.go b/app/store/store.go new file mode 100644 index 0000000..370436c --- /dev/null +++ b/app/store/store.go @@ -0,0 +1,97 @@ +package store + +import ( + "encoding/json" + "errors" + "fmt" + "log/slog" + "os" + "path/filepath" + "sync" + + "github.com/google/uuid" +) + +type Store struct { + ID string `json:"id"` + FirstTimeRun bool `json:"first-time-run"` +} + +var ( + lock sync.Mutex + store Store +) + +func GetID() string { + lock.Lock() + defer lock.Unlock() + if store.ID == "" { + initStore() + } + return store.ID +} + +func GetFirstTimeRun() bool { + lock.Lock() + defer lock.Unlock() + if store.ID == "" { + initStore() + } + return store.FirstTimeRun +} + +func SetFirstTimeRun(val bool) { + lock.Lock() + defer lock.Unlock() + if store.FirstTimeRun == val { + return + } + store.FirstTimeRun = val + writeStore(getStorePath()) +} + +// lock must be held +func initStore() { + storeFile, err := os.Open(getStorePath()) + if err == nil { + defer storeFile.Close() + err = json.NewDecoder(storeFile).Decode(&store) + if err == nil { + slog.Debug(fmt.Sprintf("loaded existing store %s - ID: %s", getStorePath(), store.ID)) + return + } + } else if !errors.Is(err, os.ErrNotExist) { + slog.Debug(fmt.Sprintf("unexpected error searching for store: %s", err)) + } + slog.Debug("initializing new store") + store.ID = uuid.NewString() + writeStore(getStorePath()) +} + +func writeStore(storeFilename string) { + ollamaDir := filepath.Dir(storeFilename) + _, err := os.Stat(ollamaDir) + if errors.Is(err, os.ErrNotExist) { + if err := os.MkdirAll(ollamaDir, 0o755); err != nil { + slog.Error(fmt.Sprintf("create ollama dir %s: %v", ollamaDir, err)) + return + } + } + payload, err := json.Marshal(store) + if err != nil { + slog.Error(fmt.Sprintf("failed to marshal store: %s", err)) + return + } + fp, err := os.OpenFile(storeFilename, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0o755) + if err != nil { + slog.Error(fmt.Sprintf("write store payload %s: %v", storeFilename, err)) + return + } + defer fp.Close() + if n, err := fp.Write(payload); err != nil || n != len(payload) { + slog.Error(fmt.Sprintf("write store payload %s: %d vs %d -- %v", storeFilename, n, len(payload), 
err)) + return + } + slog.Debug("Store contents: " + string(payload)) + slog.Info(fmt.Sprintf("wrote store: %s", storeFilename)) +} diff --git a/app/store/store_darwin.go b/app/store/store_darwin.go new file mode 100644 index 0000000..e53d852 --- /dev/null +++ b/app/store/store_darwin.go @@ -0,0 +1,13 @@ +package store + +import ( + "os" + "path/filepath" +) + +func getStorePath() string { + // TODO - system wide location? + + home := os.Getenv("HOME") + return filepath.Join(home, "Library", "Application Support", "Ollama", "config.json") +} diff --git a/app/store/store_linux.go b/app/store/store_linux.go new file mode 100644 index 0000000..3aac9b0 --- /dev/null +++ b/app/store/store_linux.go @@ -0,0 +1,16 @@ +package store + +import ( + "os" + "path/filepath" +) + +func getStorePath() string { + if os.Geteuid() == 0 { + // TODO where should we store this on linux for system-wide operation? + return "/etc/ollama/config.json" + } + + home := os.Getenv("HOME") + return filepath.Join(home, ".ollama", "config.json") +} diff --git a/app/store/store_windows.go b/app/store/store_windows.go new file mode 100644 index 0000000..ba06b82 --- /dev/null +++ b/app/store/store_windows.go @@ -0,0 +1,11 @@ +package store + +import ( + "os" + "path/filepath" +) + +func getStorePath() string { + localAppData := os.Getenv("LOCALAPPDATA") + return filepath.Join(localAppData, "Ollama", "config.json") +} diff --git a/app/tray/commontray/types.go b/app/tray/commontray/types.go new file mode 100644 index 0000000..ed633dc --- /dev/null +++ b/app/tray/commontray/types.go @@ -0,0 +1,24 @@ +package commontray + +var ( + Title = "Ollama" + ToolTip = "Ollama" + + UpdateIconName = "tray_upgrade" + IconName = "tray" +) + +type Callbacks struct { + Quit chan struct{} + Update chan struct{} + DoFirstUse chan struct{} + ShowLogs chan struct{} +} + +type OllamaTray interface { + GetCallbacks() Callbacks + Run() + UpdateAvailable(ver string) error + DisplayFirstUseNotification() error + Quit() +} diff --git a/app/tray/tray.go b/app/tray/tray.go new file mode 100644 index 0000000..dfa6343 --- /dev/null +++ b/app/tray/tray.go @@ -0,0 +1,28 @@ +package tray + +import ( + "fmt" + "runtime" + + "github.com/ollama/ollama/app/assets" + "github.com/ollama/ollama/app/tray/commontray" +) + +func NewTray() (commontray.OllamaTray, error) { + extension := ".png" + if runtime.GOOS == "windows" { + extension = ".ico" + } + iconName := commontray.UpdateIconName + extension + updateIcon, err := assets.GetIcon(iconName) + if err != nil { + return nil, fmt.Errorf("failed to load icon %s: %w", iconName, err) + } + iconName = commontray.IconName + extension + icon, err := assets.GetIcon(iconName) + if err != nil { + return nil, fmt.Errorf("failed to load icon %s: %w", iconName, err) + } + + return InitPlatformTray(icon, updateIcon) +} diff --git a/app/tray/tray_nonwindows.go b/app/tray/tray_nonwindows.go new file mode 100644 index 0000000..a03d233 --- /dev/null +++ b/app/tray/tray_nonwindows.go @@ -0,0 +1,13 @@ +//go:build !windows + +package tray + +import ( + "errors" + + "github.com/ollama/ollama/app/tray/commontray" +) + +func InitPlatformTray(icon, updateIcon []byte) (commontray.OllamaTray, error) { + return nil, errors.New("not implemented") +} diff --git a/app/tray/tray_windows.go b/app/tray/tray_windows.go new file mode 100644 index 0000000..086fc79 --- /dev/null +++ b/app/tray/tray_windows.go @@ -0,0 +1,10 @@ +package tray + +import ( + "github.com/ollama/ollama/app/tray/commontray" + "github.com/ollama/ollama/app/tray/wintray" +) + +func 
InitPlatformTray(icon, updateIcon []byte) (commontray.OllamaTray, error) { + return wintray.InitTray(icon, updateIcon) +} diff --git a/app/tray/wintray/eventloop.go b/app/tray/wintray/eventloop.go new file mode 100644 index 0000000..35608a4 --- /dev/null +++ b/app/tray/wintray/eventloop.go @@ -0,0 +1,181 @@ +//go:build windows + +package wintray + +import ( + "fmt" + "log/slog" + "sync" + "unsafe" + + "golang.org/x/sys/windows" +) + +var quitOnce sync.Once + +func (t *winTray) Run() { + nativeLoop() +} + +func nativeLoop() { + // Main message pump. + slog.Debug("starting event handling loop") + m := &struct { + WindowHandle windows.Handle + Message uint32 + Wparam uintptr + Lparam uintptr + Time uint32 + Pt point + LPrivate uint32 + }{} + for { + ret, _, err := pGetMessage.Call(uintptr(unsafe.Pointer(m)), 0, 0, 0) + + // If the function retrieves a message other than WM_QUIT, the return value is nonzero. + // If the function retrieves the WM_QUIT message, the return value is zero. + // If there is an error, the return value is -1 + // https://msdn.microsoft.com/en-us/library/windows/desktop/ms644936(v=vs.85).aspx + switch int32(ret) { + case -1: + slog.Error(fmt.Sprintf("get message failure: %v", err)) + return + case 0: + return + default: + pTranslateMessage.Call(uintptr(unsafe.Pointer(m))) //nolint:errcheck + pDispatchMessage.Call(uintptr(unsafe.Pointer(m))) //nolint:errcheck + } + } +} + +// WindowProc callback function that processes messages sent to a window. +// https://msdn.microsoft.com/en-us/library/windows/desktop/ms633573(v=vs.85).aspx +func (t *winTray) wndProc(hWnd windows.Handle, message uint32, wParam, lParam uintptr) (lResult uintptr) { + const ( + WM_RBUTTONUP = 0x0205 + WM_LBUTTONUP = 0x0202 + WM_COMMAND = 0x0111 + WM_ENDSESSION = 0x0016 + WM_CLOSE = 0x0010 + WM_DESTROY = 0x0002 + WM_MOUSEMOVE = 0x0200 + WM_LBUTTONDOWN = 0x0201 + ) + switch message { + case WM_COMMAND: + menuItemId := int32(wParam) + // https://docs.microsoft.com/en-us/windows/win32/menurc/wm-command#menus + switch menuItemId { + case quitMenuID: + select { + case t.callbacks.Quit <- struct{}{}: + // should not happen but in case not listening + default: + slog.Error("no listener on Quit") + } + case updateMenuID: + select { + case t.callbacks.Update <- struct{}{}: + // should not happen but in case not listening + default: + slog.Error("no listener on Update") + } + case diagLogsMenuID: + select { + case t.callbacks.ShowLogs <- struct{}{}: + // should not happen but in case not listening + default: + slog.Error("no listener on ShowLogs") + } + default: + slog.Debug(fmt.Sprintf("Unexpected menu item id: %d", menuItemId)) + } + case WM_CLOSE: + boolRet, _, err := pDestroyWindow.Call(uintptr(t.window)) + if boolRet == 0 { + slog.Error(fmt.Sprintf("failed to destroy window: %s", err)) + } + err = t.wcex.unregister() + if err != nil { + slog.Error(fmt.Sprintf("failed to unregister window %s", err)) + } + case WM_DESTROY: + // same as WM_ENDSESSION, but throws 0 exit code after all + defer pPostQuitMessage.Call(uintptr(int32(0))) //nolint:errcheck + fallthrough + case WM_ENDSESSION: + t.muNID.Lock() + if t.nid != nil { + err := t.nid.delete() + if err != nil { + slog.Error(fmt.Sprintf("failed to delete nid: %s", err)) + } + } + t.muNID.Unlock() + case t.wmSystrayMessage: + switch lParam { + case WM_MOUSEMOVE, WM_LBUTTONDOWN: + // Ignore these... 
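+			// (these arrive for ordinary pointer movement and button presses over the tray icon and need no handling)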
+ case WM_RBUTTONUP, WM_LBUTTONUP: + err := t.showMenu() + if err != nil { + slog.Error(fmt.Sprintf("failed to show menu: %s", err)) + } + case 0x405: // TODO - how is this magic value derived for the notification left click + if t.pendingUpdate { + select { + case t.callbacks.Update <- struct{}{}: + // should not happen but in case not listening + default: + slog.Error("no listener on Update") + } + } else { + select { + case t.callbacks.DoFirstUse <- struct{}{}: + // should not happen but in case not listening + default: + slog.Error("no listener on DoFirstUse") + } + } + case 0x404: // Middle click or close notification + // slog.Debug("doing nothing on close of first time notification") + default: + // 0x402 also seems common - what is it? + slog.Debug(fmt.Sprintf("unmanaged app message, lParm: 0x%x", lParam)) + } + case t.wmTaskbarCreated: // on explorer.exe restarts + t.muNID.Lock() + err := t.nid.add() + if err != nil { + slog.Error(fmt.Sprintf("failed to refresh the taskbar on explorer restart: %s", err)) + } + t.muNID.Unlock() + default: + // Calls the default window procedure to provide default processing for any window messages that an application does not process. + // https://msdn.microsoft.com/en-us/library/windows/desktop/ms633572(v=vs.85).aspx + lResult, _, _ = pDefWindowProc.Call( + uintptr(hWnd), + uintptr(message), + wParam, + lParam, + ) + } + return +} + +func (t *winTray) Quit() { + quitOnce.Do(quit) +} + +func quit() { + boolRet, _, err := pPostMessage.Call( + uintptr(wt.window), + WM_CLOSE, + 0, + 0, + ) + if boolRet == 0 { + slog.Error(fmt.Sprintf("failed to post close message on shutdown %s", err)) + } +} diff --git a/app/tray/wintray/menus.go b/app/tray/wintray/menus.go new file mode 100644 index 0000000..0b13d7c --- /dev/null +++ b/app/tray/wintray/menus.go @@ -0,0 +1,72 @@ +//go:build windows + +package wintray + +import ( + "fmt" + "log/slog" + "unsafe" + + "golang.org/x/sys/windows" +) + +const ( + _ = iota + updateAvailableMenuID + updateMenuID + separatorMenuID + diagLogsMenuID + diagSeparatorMenuID + quitMenuID +) + +func (t *winTray) initMenus() error { + if err := t.addOrUpdateMenuItem(diagLogsMenuID, 0, diagLogsMenuTitle, false); err != nil { + return fmt.Errorf("unable to create menu entries %w\n", err) + } + if err := t.addSeparatorMenuItem(diagSeparatorMenuID, 0); err != nil { + return fmt.Errorf("unable to create menu entries %w", err) + } + if err := t.addOrUpdateMenuItem(quitMenuID, 0, quitMenuTitle, false); err != nil { + return fmt.Errorf("unable to create menu entries %w\n", err) + } + return nil +} + +func (t *winTray) UpdateAvailable(ver string) error { + if !t.updateNotified { + slog.Debug("updating menu and sending notification for new update") + if err := t.addOrUpdateMenuItem(updateAvailableMenuID, 0, updateAvailableMenuTitle, true); err != nil { + return fmt.Errorf("unable to create menu entries %w", err) + } + if err := t.addOrUpdateMenuItem(updateMenuID, 0, updateMenuTitle, false); err != nil { + return fmt.Errorf("unable to create menu entries %w", err) + } + if err := t.addSeparatorMenuItem(separatorMenuID, 0); err != nil { + return fmt.Errorf("unable to create menu entries %w", err) + } + iconFilePath, err := iconBytesToFilePath(wt.updateIcon) + if err != nil { + return fmt.Errorf("unable to write icon data to temp file: %w", err) + } + if err := wt.setIcon(iconFilePath); err != nil { + return fmt.Errorf("unable to set icon: %w", err) + } + t.updateNotified = true + + t.pendingUpdate = true + // Now pop up the notification + 
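+		// The balloon title and body are written into the shared notify-icon struct under muNID, then pushed to the shell with modify().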
t.muNID.Lock() + defer t.muNID.Unlock() + copy(t.nid.InfoTitle[:], windows.StringToUTF16(updateTitle)) + copy(t.nid.Info[:], windows.StringToUTF16(fmt.Sprintf(updateMessage, ver))) + t.nid.Flags |= NIF_INFO + t.nid.Timeout = 10 + t.nid.Size = uint32(unsafe.Sizeof(*wt.nid)) + err = t.nid.modify() + if err != nil { + return err + } + } + return nil +} diff --git a/app/tray/wintray/messages.go b/app/tray/wintray/messages.go new file mode 100644 index 0000000..64a4785 --- /dev/null +++ b/app/tray/wintray/messages.go @@ -0,0 +1,15 @@ +//go:build windows + +package wintray + +const ( + firstTimeTitle = "Ollama is running" + firstTimeMessage = "Click here to get started" + updateTitle = "Update available" + updateMessage = "Ollama version %s is ready to install" + + quitMenuTitle = "Quit Ollama" + updateAvailableMenuTitle = "An update is available" + updateMenuTitle = "Restart to update" + diagLogsMenuTitle = "View logs" +) diff --git a/app/tray/wintray/notifyicon.go b/app/tray/wintray/notifyicon.go new file mode 100644 index 0000000..4707166 --- /dev/null +++ b/app/tray/wintray/notifyicon.go @@ -0,0 +1,66 @@ +//go:build windows + +package wintray + +import ( + "unsafe" + + "golang.org/x/sys/windows" +) + +// Contains information that the system needs to display notifications in the notification area. +// Used by Shell_NotifyIcon. +// https://msdn.microsoft.com/en-us/library/windows/desktop/bb773352(v=vs.85).aspx +// https://msdn.microsoft.com/en-us/library/windows/desktop/bb762159 +type notifyIconData struct { + Size uint32 + Wnd windows.Handle + ID, Flags, CallbackMessage uint32 + Icon windows.Handle + Tip [128]uint16 + State, StateMask uint32 + Info [256]uint16 + // Timeout, Version uint32 + Timeout uint32 + + InfoTitle [64]uint16 + InfoFlags uint32 + GuidItem windows.GUID + BalloonIcon windows.Handle +} + +func (nid *notifyIconData) add() error { + const NIM_ADD = 0x00000000 + res, _, err := pShellNotifyIcon.Call( + uintptr(NIM_ADD), + uintptr(unsafe.Pointer(nid)), + ) + if res == 0 { + return err + } + return nil +} + +func (nid *notifyIconData) modify() error { + const NIM_MODIFY = 0x00000001 + res, _, err := pShellNotifyIcon.Call( + uintptr(NIM_MODIFY), + uintptr(unsafe.Pointer(nid)), + ) + if res == 0 { + return err + } + return nil +} + +func (nid *notifyIconData) delete() error { + const NIM_DELETE = 0x00000002 + res, _, err := pShellNotifyIcon.Call( + uintptr(NIM_DELETE), + uintptr(unsafe.Pointer(nid)), + ) + if res == 0 { + return err + } + return nil +} diff --git a/app/tray/wintray/tray.go b/app/tray/wintray/tray.go new file mode 100644 index 0000000..19fa98e --- /dev/null +++ b/app/tray/wintray/tray.go @@ -0,0 +1,488 @@ +//go:build windows + +package wintray + +import ( + "crypto/md5" + "encoding/hex" + "fmt" + "log/slog" + "os" + "path/filepath" + "sort" + "sync" + "syscall" + "unsafe" + + "golang.org/x/sys/windows" + + "github.com/ollama/ollama/app/tray/commontray" +) + +// Helpful sources: https://github.com/golang/exp/blob/master/shiny/driver/internal/win32 + +// Contains information about loaded resources +type winTray struct { + instance, + icon, + cursor, + window windows.Handle + + loadedImages map[string]windows.Handle + muLoadedImages sync.RWMutex + + // menus keeps track of the submenus keyed by the menu item ID, plus 0 + // which corresponds to the main popup menu. + menus map[uint32]windows.Handle + muMenus sync.RWMutex + menuOf map[uint32]windows.Handle + muMenuOf sync.RWMutex + // menuItemIcons maintains the bitmap of each menu item (if applies). 
It's + // needed to show the icon correctly when showing a previously hidden menu + // item again. + // menuItemIcons map[uint32]windows.Handle + // muMenuItemIcons sync.RWMutex + visibleItems map[uint32][]uint32 + muVisibleItems sync.RWMutex + + nid *notifyIconData + muNID sync.RWMutex + wcex *wndClassEx + + wmSystrayMessage, + wmTaskbarCreated uint32 + + pendingUpdate bool + updateNotified bool // Only pop up the notification once - TODO consider daily nag? + // Callbacks + callbacks commontray.Callbacks + normalIcon []byte + updateIcon []byte +} + +var wt winTray + +func (t *winTray) GetCallbacks() commontray.Callbacks { + return t.callbacks +} + +func InitTray(icon, updateIcon []byte) (*winTray, error) { + wt.callbacks.Quit = make(chan struct{}) + wt.callbacks.Update = make(chan struct{}) + wt.callbacks.ShowLogs = make(chan struct{}) + wt.callbacks.DoFirstUse = make(chan struct{}) + wt.normalIcon = icon + wt.updateIcon = updateIcon + if err := wt.initInstance(); err != nil { + return nil, fmt.Errorf("Unable to init instance: %w\n", err) + } + + if err := wt.createMenu(); err != nil { + return nil, fmt.Errorf("Unable to create menu: %w\n", err) + } + + iconFilePath, err := iconBytesToFilePath(wt.normalIcon) + if err != nil { + return nil, fmt.Errorf("Unable to write icon data to temp file: %w", err) + } + if err := wt.setIcon(iconFilePath); err != nil { + return nil, fmt.Errorf("Unable to set icon: %w", err) + } + + return &wt, wt.initMenus() +} + +func (t *winTray) initInstance() error { + const ( + className = "OllamaClass" + windowName = "" + ) + + t.wmSystrayMessage = WM_USER + 1 + t.visibleItems = make(map[uint32][]uint32) + t.menus = make(map[uint32]windows.Handle) + t.menuOf = make(map[uint32]windows.Handle) + + t.loadedImages = make(map[string]windows.Handle) + + taskbarEventNamePtr, _ := windows.UTF16PtrFromString("TaskbarCreated") + // https://msdn.microsoft.com/en-us/library/windows/desktop/ms644947 + res, _, err := pRegisterWindowMessage.Call( + uintptr(unsafe.Pointer(taskbarEventNamePtr)), + ) + if res == 0 { // success 0xc000-0xfff + return fmt.Errorf("failed to register window: %w", err) + } + t.wmTaskbarCreated = uint32(res) + + instanceHandle, _, err := pGetModuleHandle.Call(0) + if instanceHandle == 0 { + return err + } + t.instance = windows.Handle(instanceHandle) + + // https://msdn.microsoft.com/en-us/library/windows/desktop/ms648072(v=vs.85).aspx + iconHandle, _, err := pLoadIcon.Call(0, uintptr(IDI_APPLICATION)) + if iconHandle == 0 { + return err + } + t.icon = windows.Handle(iconHandle) + + // https://msdn.microsoft.com/en-us/library/windows/desktop/ms648391(v=vs.85).aspx + cursorHandle, _, err := pLoadCursor.Call(0, uintptr(IDC_ARROW)) + if cursorHandle == 0 { + return err + } + t.cursor = windows.Handle(cursorHandle) + + classNamePtr, err := windows.UTF16PtrFromString(className) + if err != nil { + return err + } + + windowNamePtr, err := windows.UTF16PtrFromString(windowName) + if err != nil { + return err + } + + t.wcex = &wndClassEx{ + Style: CS_HREDRAW | CS_VREDRAW, + WndProc: windows.NewCallback(t.wndProc), + Instance: t.instance, + Icon: t.icon, + Cursor: t.cursor, + Background: windows.Handle(6), // (COLOR_WINDOW + 1) + ClassName: classNamePtr, + IconSm: t.icon, + } + if err := t.wcex.register(); err != nil { + return err + } + + windowHandle, _, err := pCreateWindowEx.Call( + uintptr(0), + uintptr(unsafe.Pointer(classNamePtr)), + uintptr(unsafe.Pointer(windowNamePtr)), + uintptr(WS_OVERLAPPEDWINDOW), + uintptr(CW_USEDEFAULT), + uintptr(CW_USEDEFAULT), 
+ uintptr(CW_USEDEFAULT), + uintptr(CW_USEDEFAULT), + uintptr(0), + uintptr(0), + uintptr(t.instance), + uintptr(0), + ) + if windowHandle == 0 { + return err + } + t.window = windows.Handle(windowHandle) + + pShowWindow.Call(uintptr(t.window), uintptr(SW_HIDE)) //nolint:errcheck + + boolRet, _, err := pUpdateWindow.Call(uintptr(t.window)) + if boolRet == 0 { + slog.Error(fmt.Sprintf("failed to update window: %s", err)) + } + + t.muNID.Lock() + defer t.muNID.Unlock() + t.nid = &notifyIconData{ + Wnd: t.window, + ID: 100, + Flags: NIF_MESSAGE, + CallbackMessage: t.wmSystrayMessage, + } + t.nid.Size = uint32(unsafe.Sizeof(*t.nid)) + + return t.nid.add() +} + +func (t *winTray) createMenu() error { + menuHandle, _, err := pCreatePopupMenu.Call() + if menuHandle == 0 { + return err + } + t.menus[0] = windows.Handle(menuHandle) + + // https://msdn.microsoft.com/en-us/library/windows/desktop/ms647575(v=vs.85).aspx + mi := struct { + Size, Mask, Style, Max uint32 + Background windows.Handle + ContextHelpID uint32 + MenuData uintptr + }{ + Mask: MIM_APPLYTOSUBMENUS, + } + mi.Size = uint32(unsafe.Sizeof(mi)) + + res, _, err := pSetMenuInfo.Call( + uintptr(t.menus[0]), + uintptr(unsafe.Pointer(&mi)), + ) + if res == 0 { + return err + } + return nil +} + +// Contains information about a menu item. +// https://msdn.microsoft.com/en-us/library/windows/desktop/ms647578(v=vs.85).aspx +type menuItemInfo struct { + Size, Mask, Type, State uint32 + ID uint32 + SubMenu, Checked, Unchecked windows.Handle + ItemData uintptr + TypeData *uint16 + Cch uint32 + BMPItem windows.Handle +} + +func (t *winTray) addOrUpdateMenuItem(menuItemId uint32, parentId uint32, title string, disabled bool) error { + titlePtr, err := windows.UTF16PtrFromString(title) + if err != nil { + return err + } + + mi := menuItemInfo{ + Mask: MIIM_FTYPE | MIIM_STRING | MIIM_ID | MIIM_STATE, + Type: MFT_STRING, + ID: menuItemId, + TypeData: titlePtr, + Cch: uint32(len(title)), + } + mi.Size = uint32(unsafe.Sizeof(mi)) + if disabled { + mi.State |= MFS_DISABLED + } + + var res uintptr + t.muMenus.RLock() + menu := t.menus[parentId] + t.muMenus.RUnlock() + if t.getVisibleItemIndex(parentId, menuItemId) != -1 { + // We set the menu item info based on the menuID + boolRet, _, err := pSetMenuItemInfo.Call( + uintptr(menu), + uintptr(menuItemId), + 0, + uintptr(unsafe.Pointer(&mi)), + ) + if boolRet == 0 { + return fmt.Errorf("failed to set menu item: %w", err) + } + } + + if res == 0 { + // Menu item does not already exist, create it + t.muMenus.RLock() + submenu, exists := t.menus[menuItemId] + t.muMenus.RUnlock() + if exists { + mi.Mask |= MIIM_SUBMENU + mi.SubMenu = submenu + } + t.addToVisibleItems(parentId, menuItemId) + position := t.getVisibleItemIndex(parentId, menuItemId) + res, _, err = pInsertMenuItem.Call( + uintptr(menu), + uintptr(position), + 1, + uintptr(unsafe.Pointer(&mi)), + ) + if res == 0 { + t.delFromVisibleItems(parentId, menuItemId) + return err + } + t.muMenuOf.Lock() + t.menuOf[menuItemId] = menu + t.muMenuOf.Unlock() + } + + return nil +} + +func (t *winTray) addSeparatorMenuItem(menuItemId, parentId uint32) error { + mi := menuItemInfo{ + Mask: MIIM_FTYPE | MIIM_ID | MIIM_STATE, + Type: MFT_SEPARATOR, + ID: menuItemId, + } + + mi.Size = uint32(unsafe.Sizeof(mi)) + + t.addToVisibleItems(parentId, menuItemId) + position := t.getVisibleItemIndex(parentId, menuItemId) + t.muMenus.RLock() + menu := uintptr(t.menus[parentId]) + t.muMenus.RUnlock() + res, _, err := pInsertMenuItem.Call( + menu, + uintptr(position), + 1, +
uintptr(unsafe.Pointer(&mi)), + ) + if res == 0 { + return err + } + + return nil +} + +// func (t *winTray) hideMenuItem(menuItemId, parentId uint32) error { +// const ERROR_SUCCESS syscall.Errno = 0 + +// t.muMenus.RLock() +// menu := uintptr(t.menus[parentId]) +// t.muMenus.RUnlock() +// res, _, err := pRemoveMenu.Call( +// menu, +// uintptr(menuItemId), +// MF_BYCOMMAND, +// ) +// if res == 0 && err.(syscall.Errno) != ERROR_SUCCESS { +// return err +// } +// t.delFromVisibleItems(parentId, menuItemId) + +// return nil +// } + +func (t *winTray) showMenu() error { + p := point{} + boolRet, _, err := pGetCursorPos.Call(uintptr(unsafe.Pointer(&p))) + if boolRet == 0 { + return err + } + boolRet, _, err = pSetForegroundWindow.Call(uintptr(t.window)) + if boolRet == 0 { + slog.Warn(fmt.Sprintf("failed to bring menu to foreground: %s", err)) + } + + boolRet, _, err = pTrackPopupMenu.Call( + uintptr(t.menus[0]), + TPM_BOTTOMALIGN|TPM_LEFTALIGN|TPM_RIGHTBUTTON, + uintptr(p.X), + uintptr(p.Y), + 0, + uintptr(t.window), + 0, + ) + if boolRet == 0 { + return err + } + + return nil +} + +func (t *winTray) delFromVisibleItems(parent, val uint32) { + t.muVisibleItems.Lock() + defer t.muVisibleItems.Unlock() + visibleItems := t.visibleItems[parent] + for i, itemval := range visibleItems { + if val == itemval { + t.visibleItems[parent] = append(visibleItems[:i], visibleItems[i+1:]...) + break + } + } +} + +func (t *winTray) addToVisibleItems(parent, val uint32) { + t.muVisibleItems.Lock() + defer t.muVisibleItems.Unlock() + if visibleItems, exists := t.visibleItems[parent]; !exists { + t.visibleItems[parent] = []uint32{val} + } else { + newvisible := append(visibleItems, val) + sort.Slice(newvisible, func(i, j int) bool { return newvisible[i] < newvisible[j] }) + t.visibleItems[parent] = newvisible + } +} + +func (t *winTray) getVisibleItemIndex(parent, val uint32) int { + t.muVisibleItems.RLock() + defer t.muVisibleItems.RUnlock() + for i, itemval := range t.visibleItems[parent] { + if val == itemval { + return i + } + } + return -1 +} + +func iconBytesToFilePath(iconBytes []byte) (string, error) { + bh := md5.Sum(iconBytes) + dataHash := hex.EncodeToString(bh[:]) + iconFilePath := filepath.Join(os.TempDir(), "ollama_temp_icon_"+dataHash) + + if _, err := os.Stat(iconFilePath); os.IsNotExist(err) { + if err := os.WriteFile(iconFilePath, iconBytes, 0o644); err != nil { + return "", err + } + } + return iconFilePath, nil +} + +// Loads an image from file and shows it in tray. +// Shell_NotifyIcon: https://msdn.microsoft.com/en-us/library/windows/desktop/bb762159(v=vs.85).aspx +func (t *winTray) setIcon(src string) error { + h, err := t.loadIconFrom(src) + if err != nil { + return err + } + + t.muNID.Lock() + defer t.muNID.Unlock() + t.nid.Icon = h + t.nid.Flags |= NIF_ICON | NIF_TIP + if toolTipUTF16, err := syscall.UTF16FromString(commontray.ToolTip); err == nil { + copy(t.nid.Tip[:], toolTipUTF16) + } else { + return err + } + t.nid.Size = uint32(unsafe.Sizeof(*t.nid)) + + return t.nid.modify() +} + +// Loads an image from file to be shown in tray or menu item. 
+// LoadImage: https://msdn.microsoft.com/en-us/library/windows/desktop/ms648045(v=vs.85).aspx +func (t *winTray) loadIconFrom(src string) (windows.Handle, error) { + // Save and reuse handles of loaded images + t.muLoadedImages.RLock() + h, ok := t.loadedImages[src] + t.muLoadedImages.RUnlock() + if !ok { + srcPtr, err := windows.UTF16PtrFromString(src) + if err != nil { + return 0, err + } + res, _, err := pLoadImage.Call( + 0, + uintptr(unsafe.Pointer(srcPtr)), + IMAGE_ICON, + 0, + 0, + LR_LOADFROMFILE|LR_DEFAULTSIZE, + ) + if res == 0 { + return 0, err + } + h = windows.Handle(res) + t.muLoadedImages.Lock() + t.loadedImages[src] = h + t.muLoadedImages.Unlock() + } + return h, nil +} + +func (t *winTray) DisplayFirstUseNotification() error { + t.muNID.Lock() + defer t.muNID.Unlock() + copy(t.nid.InfoTitle[:], windows.StringToUTF16(firstTimeTitle)) + copy(t.nid.Info[:], windows.StringToUTF16(firstTimeMessage)) + t.nid.Flags |= NIF_INFO + t.nid.Size = uint32(unsafe.Sizeof(*wt.nid)) + + return t.nid.modify() +} diff --git a/app/tray/wintray/w32api.go b/app/tray/wintray/w32api.go new file mode 100644 index 0000000..d23bfd9 --- /dev/null +++ b/app/tray/wintray/w32api.go @@ -0,0 +1,91 @@ +//go:build windows + +package wintray + +import ( + "runtime" + + "golang.org/x/sys/windows" +) + +var ( + k32 = windows.NewLazySystemDLL("Kernel32.dll") + u32 = windows.NewLazySystemDLL("User32.dll") + s32 = windows.NewLazySystemDLL("Shell32.dll") + + pCreatePopupMenu = u32.NewProc("CreatePopupMenu") + pCreateWindowEx = u32.NewProc("CreateWindowExW") + pDefWindowProc = u32.NewProc("DefWindowProcW") + pDestroyWindow = u32.NewProc("DestroyWindow") + pDispatchMessage = u32.NewProc("DispatchMessageW") + pGetCursorPos = u32.NewProc("GetCursorPos") + pGetMessage = u32.NewProc("GetMessageW") + pGetModuleHandle = k32.NewProc("GetModuleHandleW") + pInsertMenuItem = u32.NewProc("InsertMenuItemW") + pLoadCursor = u32.NewProc("LoadCursorW") + pLoadIcon = u32.NewProc("LoadIconW") + pLoadImage = u32.NewProc("LoadImageW") + pPostMessage = u32.NewProc("PostMessageW") + pPostQuitMessage = u32.NewProc("PostQuitMessage") + pRegisterClass = u32.NewProc("RegisterClassExW") + pRegisterWindowMessage = u32.NewProc("RegisterWindowMessageW") + pSetForegroundWindow = u32.NewProc("SetForegroundWindow") + pSetMenuInfo = u32.NewProc("SetMenuInfo") + pSetMenuItemInfo = u32.NewProc("SetMenuItemInfoW") + pShellNotifyIcon = s32.NewProc("Shell_NotifyIconW") + pShowWindow = u32.NewProc("ShowWindow") + pTrackPopupMenu = u32.NewProc("TrackPopupMenu") + pTranslateMessage = u32.NewProc("TranslateMessage") + pUnregisterClass = u32.NewProc("UnregisterClassW") + pUpdateWindow = u32.NewProc("UpdateWindow") +) + +const ( + CS_HREDRAW = 0x0002 + CS_VREDRAW = 0x0001 + CW_USEDEFAULT = 0x80000000 + IDC_ARROW = 32512 // Standard arrow + IDI_APPLICATION = 32512 + IMAGE_ICON = 1 // Loads an icon + LR_DEFAULTSIZE = 0x00000040 // Loads default-size icon for windows(SM_CXICON x SM_CYICON) if cx, cy are set to zero + LR_LOADFROMFILE = 0x00000010 // Loads the stand-alone image from the file + MF_BYCOMMAND = 0x00000000 + MFS_DISABLED = 0x00000003 + MFT_SEPARATOR = 0x00000800 + MFT_STRING = 0x00000000 + MIIM_BITMAP = 0x00000080 + MIIM_FTYPE = 0x00000100 + MIIM_ID = 0x00000002 + MIIM_STATE = 0x00000001 + MIIM_STRING = 0x00000040 + MIIM_SUBMENU = 0x00000004 + MIM_APPLYTOSUBMENUS = 0x80000000 + NIF_ICON = 0x00000002 + NIF_TIP = 0x00000004 + NIF_INFO = 0x00000010 + NIF_MESSAGE = 0x00000001 + SW_HIDE = 0 + TPM_BOTTOMALIGN = 0x0020 + TPM_LEFTALIGN = 0x0000 + 
TPM_RIGHTBUTTON = 0x0002 + WM_CLOSE = 0x0010 + WM_USER = 0x0400 + WS_CAPTION = 0x00C00000 + WS_MAXIMIZEBOX = 0x00010000 + WS_MINIMIZEBOX = 0x00020000 + WS_OVERLAPPED = 0x00000000 + WS_OVERLAPPEDWINDOW = WS_OVERLAPPED | WS_CAPTION | WS_SYSMENU | WS_THICKFRAME | WS_MINIMIZEBOX | WS_MAXIMIZEBOX + WS_SYSMENU = 0x00080000 + WS_THICKFRAME = 0x00040000 +) + +// Not sure if this is actually needed on windows +func init() { + runtime.LockOSThread() +} + +// The POINT structure defines the x- and y- coordinates of a point. +// https://msdn.microsoft.com/en-us/library/windows/desktop/dd162805(v=vs.85).aspx +type point struct { + X, Y int32 +} diff --git a/app/tray/wintray/winclass.go b/app/tray/wintray/winclass.go new file mode 100644 index 0000000..9ce71d0 --- /dev/null +++ b/app/tray/wintray/winclass.go @@ -0,0 +1,45 @@ +//go:build windows + +package wintray + +import ( + "unsafe" + + "golang.org/x/sys/windows" +) + +// Contains window class information. +// It is used with the RegisterClassEx and GetClassInfoEx functions. +// https://msdn.microsoft.com/en-us/library/ms633577.aspx +type wndClassEx struct { + Size, Style uint32 + WndProc uintptr + ClsExtra, WndExtra int32 + Instance, Icon, Cursor, Background windows.Handle + MenuName, ClassName *uint16 + IconSm windows.Handle +} + +// Registers a window class for subsequent use in calls to the CreateWindow or CreateWindowEx function. +// https://msdn.microsoft.com/en-us/library/ms633587.aspx +func (w *wndClassEx) register() error { + w.Size = uint32(unsafe.Sizeof(*w)) + res, _, err := pRegisterClass.Call(uintptr(unsafe.Pointer(w))) + if res == 0 { + return err + } + return nil +} + +// Unregisters a window class, freeing the memory required for the class. +// https://msdn.microsoft.com/en-us/library/ms644899.aspx +func (w *wndClassEx) unregister() error { + res, _, err := pUnregisterClass.Call( + uintptr(unsafe.Pointer(w.ClassName)), + uintptr(w.Instance), + ) + if res == 0 { + return err + } + return nil +} diff --git a/auth/auth.go b/auth/auth.go new file mode 100644 index 0000000..e1d8541 --- /dev/null +++ b/auth/auth.go @@ -0,0 +1,92 @@ +package auth + +import ( + "bytes" + "context" + "crypto/rand" + "encoding/base64" + "errors" + "fmt" + "io" + "log/slog" + "os" + "path/filepath" + "strings" + + "golang.org/x/crypto/ssh" +) + +const defaultPrivateKey = "id_ed25519" + +func keyPath() (string, error) { + home, err := os.UserHomeDir() + if err != nil { + return "", err + } + + return filepath.Join(home, ".ollama", defaultPrivateKey), nil +} + +func GetPublicKey() (string, error) { + keyPath, err := keyPath() + if err != nil { + return "", err + } + + privateKeyFile, err := os.ReadFile(keyPath) + if err != nil { + slog.Info(fmt.Sprintf("Failed to load private key: %v", err)) + return "", err + } + + privateKey, err := ssh.ParsePrivateKey(privateKeyFile) + if err != nil { + return "", err + } + + publicKey := ssh.MarshalAuthorizedKey(privateKey.PublicKey()) + + return strings.TrimSpace(string(publicKey)), nil +} + +func NewNonce(r io.Reader, length int) (string, error) { + nonce := make([]byte, length) + if _, err := io.ReadFull(r, nonce); err != nil { + return "", err + } + + return base64.RawURLEncoding.EncodeToString(nonce), nil +} + +func Sign(ctx context.Context, bts []byte) (string, error) { + keyPath, err := keyPath() + if err != nil { + return "", err + } + + privateKeyFile, err := os.ReadFile(keyPath) + if err != nil { + slog.Info(fmt.Sprintf("Failed to load private key: %v", err)) + return "", err + } + + privateKey, err := 
ssh.ParsePrivateKey(privateKeyFile) + if err != nil { + return "", err + } + + // get the pubkey, but remove the type + publicKey := ssh.MarshalAuthorizedKey(privateKey.PublicKey()) + parts := bytes.Split(publicKey, []byte(" ")) + if len(parts) < 2 { + return "", errors.New("malformed public key") + } + + signedData, err := privateKey.Sign(rand.Reader, bts) + if err != nil { + return "", err + } + + // signature is : + return fmt.Sprintf("%s:%s", bytes.TrimSpace(parts[1]), base64.StdEncoding.EncodeToString(signedData.Blob)), nil +} diff --git a/benchmark/server_benchmark_test.go b/benchmark/server_benchmark_test.go new file mode 100644 index 0000000..4a3c46c --- /dev/null +++ b/benchmark/server_benchmark_test.go @@ -0,0 +1,178 @@ +package benchmark + +import ( + "context" + "flag" + "fmt" + "testing" + "time" + + "github.com/ollama/ollama/api" +) + +// Command line flags +var modelFlag string + +func init() { + flag.StringVar(&modelFlag, "m", "", "Name of the model to benchmark") + flag.Lookup("m").DefValue = "model" +} + +// modelName returns the model name from flags, failing the test if not set +func modelName(b *testing.B) string { + if modelFlag == "" { + b.Fatal("Error: -m flag is required for benchmark tests") + } + return modelFlag +} + +type TestCase struct { + name string + prompt string + maxTokens int +} + +// runGenerateBenchmark contains the common generate and metrics logic +func runGenerateBenchmark(b *testing.B, ctx context.Context, client *api.Client, req *api.GenerateRequest) { + start := time.Now() + var ttft time.Duration + var metrics api.Metrics + + err := client.Generate(ctx, req, func(resp api.GenerateResponse) error { + if ttft == 0 && resp.Response != "" { + ttft = time.Since(start) + } + if resp.Done { + metrics = resp.Metrics + } + return nil + }) + + // Report custom metrics as part of the benchmark results + b.ReportMetric(float64(ttft.Milliseconds()), "ttft_ms") + b.ReportMetric(float64(metrics.LoadDuration.Milliseconds()), "load_ms") + + // Token throughput metrics + promptThroughput := float64(metrics.PromptEvalCount) / metrics.PromptEvalDuration.Seconds() + genThroughput := float64(metrics.EvalCount) / metrics.EvalDuration.Seconds() + b.ReportMetric(promptThroughput, "prompt_tok/s") + b.ReportMetric(genThroughput, "gen_tok/s") + + // Token counts + b.ReportMetric(float64(metrics.PromptEvalCount), "prompt_tokens") + b.ReportMetric(float64(metrics.EvalCount), "gen_tokens") + if err != nil { + b.Fatal(err) + } +} + +// BenchmarkColdStart runs benchmarks with model loading from cold state +func BenchmarkColdStart(b *testing.B) { + client := setup(b) + tests := []TestCase{ + {"short_prompt", "Write a long story", 100}, + {"medium_prompt", "Write a detailed economic analysis", 500}, + {"long_prompt", "Write a comprehensive AI research paper", 1000}, + } + m := modelName(b) + + for _, tt := range tests { + b.Run(fmt.Sprintf("%s/cold/%s", m, tt.name), func(b *testing.B) { + ctx := b.Context() + + // Set number of tokens as our throughput metric + b.SetBytes(int64(tt.maxTokens)) + + for b.Loop() { + b.StopTimer() + // Ensure model is unloaded before each iteration + unload(client, m, b) + b.StartTimer() + + req := &api.GenerateRequest{ + Model: m, + Prompt: tt.prompt, + Options: map[string]any{"num_predict": tt.maxTokens, "temperature": 0.1}, + } + + runGenerateBenchmark(b, ctx, client, req) + } + }) + } +} + +// BenchmarkWarmStart runs benchmarks with pre-loaded model +func BenchmarkWarmStart(b *testing.B) { + client := setup(b) + tests := []TestCase{ + 
{"short_prompt", "Write a long story", 100}, + {"medium_prompt", "Write a detailed economic analysis", 500}, + {"long_prompt", "Write a comprehensive AI research paper", 1000}, + } + m := modelName(b) + + for _, tt := range tests { + b.Run(fmt.Sprintf("%s/warm/%s", m, tt.name), func(b *testing.B) { + ctx := b.Context() + + // Pre-warm the model + warmup(client, m, tt.prompt, b) + + // Set number of tokens as our throughput metric + b.SetBytes(int64(tt.maxTokens)) + + for b.Loop() { + req := &api.GenerateRequest{ + Model: m, + Prompt: tt.prompt, + Options: map[string]any{"num_predict": tt.maxTokens, "temperature": 0.1}, + } + + runGenerateBenchmark(b, ctx, client, req) + } + }) + } +} + +// setup verifies server and model availability +func setup(b *testing.B) *api.Client { + client, err := api.ClientFromEnvironment() + if err != nil { + b.Fatal(err) + } + if _, err := client.Show(b.Context(), &api.ShowRequest{Model: modelName(b)}); err != nil { + b.Fatalf("Model unavailable: %v", err) + } + + return client +} + +// warmup ensures the model is loaded and warmed up +func warmup(client *api.Client, model string, prompt string, b *testing.B) { + for range 3 { + err := client.Generate( + context.Background(), + &api.GenerateRequest{ + Model: model, + Prompt: prompt, + Options: map[string]any{"num_predict": 50, "temperature": 0.1}, + }, + func(api.GenerateResponse) error { return nil }, + ) + if err != nil { + b.Logf("Error during model warm-up: %v", err) + } + } +} + +// unload forces model unloading using KeepAlive: 0 parameter +func unload(client *api.Client, model string, b *testing.B) { + req := &api.GenerateRequest{ + Model: model, + KeepAlive: &api.Duration{Duration: 0}, + } + if err := client.Generate(context.Background(), req, func(api.GenerateResponse) error { return nil }); err != nil { + b.Logf("Unload error: %v", err) + } + time.Sleep(1 * time.Second) +} diff --git a/cmd/cmd.go b/cmd/cmd.go new file mode 100644 index 0000000..ad4be7f --- /dev/null +++ b/cmd/cmd.go @@ -0,0 +1,1490 @@ +package cmd + +import ( + "bufio" + "context" + "crypto/ed25519" + "crypto/rand" + "encoding/json" + "encoding/pem" + "errors" + "fmt" + "io" + "log" + "math" + "net" + "net/http" + "os" + "os/signal" + "path/filepath" + "runtime" + "slices" + "sort" + "strconv" + "strings" + "sync/atomic" + "syscall" + "time" + + "github.com/containerd/console" + "github.com/mattn/go-runewidth" + "github.com/olekukonko/tablewriter" + "github.com/spf13/cobra" + "golang.org/x/crypto/ssh" + "golang.org/x/sync/errgroup" + "golang.org/x/term" + + "github.com/ollama/ollama/api" + "github.com/ollama/ollama/envconfig" + "github.com/ollama/ollama/format" + "github.com/ollama/ollama/parser" + "github.com/ollama/ollama/progress" + "github.com/ollama/ollama/runner" + "github.com/ollama/ollama/server" + "github.com/ollama/ollama/types/model" + "github.com/ollama/ollama/types/syncmap" + "github.com/ollama/ollama/version" +) + +var errModelfileNotFound = errors.New("specified Modelfile wasn't found") + +func getModelfileName(cmd *cobra.Command) (string, error) { + filename, _ := cmd.Flags().GetString("file") + + if filename == "" { + filename = "Modelfile" + } + + absName, err := filepath.Abs(filename) + if err != nil { + return "", err + } + + _, err = os.Stat(absName) + if err != nil { + return "", err + } + + return absName, nil +} + +func CreateHandler(cmd *cobra.Command, args []string) error { + p := progress.NewProgress(os.Stderr) + defer p.Stop() + + var reader io.Reader + + filename, err := getModelfileName(cmd) + if 
os.IsNotExist(err) { + if filename == "" { + reader = strings.NewReader("FROM .\n") + } else { + return errModelfileNotFound + } + } else if err != nil { + return err + } else { + f, err := os.Open(filename) + if err != nil { + return err + } + + reader = f + defer f.Close() + } + + modelfile, err := parser.ParseFile(reader) + if err != nil { + return err + } + + status := "gathering model components" + spinner := progress.NewSpinner(status) + p.Add(status, spinner) + + req, err := modelfile.CreateRequest(filepath.Dir(filename)) + if err != nil { + return err + } + spinner.Stop() + + req.Model = args[0] + quantize, _ := cmd.Flags().GetString("quantize") + if quantize != "" { + req.Quantize = quantize + } + + client, err := api.ClientFromEnvironment() + if err != nil { + return err + } + + var g errgroup.Group + g.SetLimit(max(runtime.GOMAXPROCS(0)-1, 1)) + + files := syncmap.NewSyncMap[string, string]() + for f, digest := range req.Files { + g.Go(func() error { + if _, err := createBlob(cmd, client, f, digest, p); err != nil { + return err + } + + // TODO: this is incorrect since the file might be in a subdirectory + // instead this should take the path relative to the model directory + // but the current implementation does not allow this + files.Store(filepath.Base(f), digest) + return nil + }) + } + + adapters := syncmap.NewSyncMap[string, string]() + for f, digest := range req.Adapters { + g.Go(func() error { + if _, err := createBlob(cmd, client, f, digest, p); err != nil { + return err + } + + // TODO: same here + adapters.Store(filepath.Base(f), digest) + return nil + }) + } + + if err := g.Wait(); err != nil { + return err + } + + req.Files = files.Items() + req.Adapters = adapters.Items() + + bars := make(map[string]*progress.Bar) + fn := func(resp api.ProgressResponse) error { + if resp.Digest != "" { + bar, ok := bars[resp.Digest] + if !ok { + msg := resp.Status + if msg == "" { + msg = fmt.Sprintf("pulling %s...", resp.Digest[7:19]) + } + bar = progress.NewBar(msg, resp.Total, resp.Completed) + bars[resp.Digest] = bar + p.Add(resp.Digest, bar) + } + + bar.Set(resp.Completed) + } else if status != resp.Status { + spinner.Stop() + + status = resp.Status + spinner = progress.NewSpinner(status) + p.Add(status, spinner) + } + + return nil + } + + if err := client.Create(cmd.Context(), req, fn); err != nil { + if strings.Contains(err.Error(), "path or Modelfile are required") { + return fmt.Errorf("the ollama server must be updated to use `ollama create` with this client") + } + return err + } + + return nil +} + +func createBlob(cmd *cobra.Command, client *api.Client, path string, digest string, p *progress.Progress) (string, error) { + realPath, err := filepath.EvalSymlinks(path) + if err != nil { + return "", err + } + + bin, err := os.Open(realPath) + if err != nil { + return "", err + } + defer bin.Close() + + // Get file info to retrieve the size + fileInfo, err := bin.Stat() + if err != nil { + return "", err + } + fileSize := fileInfo.Size() + + var pw progressWriter + status := fmt.Sprintf("copying file %s 0%%", digest) + spinner := progress.NewSpinner(status) + p.Add(status, spinner) + defer spinner.Stop() + + done := make(chan struct{}) + defer close(done) + + go func() { + ticker := time.NewTicker(60 * time.Millisecond) + defer ticker.Stop() + for { + select { + case <-ticker.C: + spinner.SetMessage(fmt.Sprintf("copying file %s %d%%", digest, int(100*pw.n.Load()/fileSize))) + case <-done: + spinner.SetMessage(fmt.Sprintf("copying file %s 100%%", digest)) + return + } + } 
+ }() + + if err := client.CreateBlob(cmd.Context(), digest, io.TeeReader(bin, &pw)); err != nil { + return "", err + } + return digest, nil +} + +type progressWriter struct { + n atomic.Int64 +} + +func (w *progressWriter) Write(p []byte) (n int, err error) { + w.n.Add(int64(len(p))) + return len(p), nil +} + +func loadOrUnloadModel(cmd *cobra.Command, opts *runOptions) error { + p := progress.NewProgress(os.Stderr) + defer p.StopAndClear() + + spinner := progress.NewSpinner("") + p.Add("", spinner) + + client, err := api.ClientFromEnvironment() + if err != nil { + return err + } + + req := &api.GenerateRequest{ + Model: opts.Model, + KeepAlive: opts.KeepAlive, + } + + return client.Generate(cmd.Context(), req, func(api.GenerateResponse) error { return nil }) +} + +func StopHandler(cmd *cobra.Command, args []string) error { + opts := &runOptions{ + Model: args[0], + KeepAlive: &api.Duration{Duration: 0}, + } + if err := loadOrUnloadModel(cmd, opts); err != nil { + if strings.Contains(err.Error(), "not found") { + return fmt.Errorf("couldn't find model \"%s\" to stop", args[0]) + } + return err + } + return nil +} + +func RunHandler(cmd *cobra.Command, args []string) error { + interactive := true + + opts := runOptions{ + Model: args[0], + WordWrap: os.Getenv("TERM") == "xterm-256color", + Options: map[string]any{}, + } + + format, err := cmd.Flags().GetString("format") + if err != nil { + return err + } + opts.Format = format + + keepAlive, err := cmd.Flags().GetString("keepalive") + if err != nil { + return err + } + if keepAlive != "" { + d, err := time.ParseDuration(keepAlive) + if err != nil { + return err + } + opts.KeepAlive = &api.Duration{Duration: d} + } + + prompts := args[1:] + // prepend stdin to the prompt if provided + if !term.IsTerminal(int(os.Stdin.Fd())) { + in, err := io.ReadAll(os.Stdin) + if err != nil { + return err + } + + prompts = append([]string{string(in)}, prompts...) + opts.WordWrap = false + interactive = false + } + opts.Prompt = strings.Join(prompts, " ") + if len(prompts) > 0 { + interactive = false + } + // Be quiet if we're redirecting to a pipe or file + if !term.IsTerminal(int(os.Stdout.Fd())) { + interactive = false + } + + nowrap, err := cmd.Flags().GetBool("nowordwrap") + if err != nil { + return err + } + opts.WordWrap = !nowrap + + // Fill out the rest of the options based on information about the + // model. 
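+	// Show supplies capabilities, projector/vision details and any saved messages; if the model isn't present locally it is pulled and Show is retried.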
+ client, err := api.ClientFromEnvironment() + if err != nil { + return err + } + + name := args[0] + info, err := func() (*api.ShowResponse, error) { + showReq := &api.ShowRequest{Name: name} + info, err := client.Show(cmd.Context(), showReq) + var se api.StatusError + if errors.As(err, &se) && se.StatusCode == http.StatusNotFound { + if err := PullHandler(cmd, []string{name}); err != nil { + return nil, err + } + return client.Show(cmd.Context(), &api.ShowRequest{Name: name}) + } + return info, err + }() + if err != nil { + return err + } + + opts.MultiModal = slices.Contains(info.Capabilities, model.CapabilityVision) + + // TODO: remove the projector info and vision info checks below, + // these are left in for backwards compatibility with older servers + // that don't have the capabilities field in the model info + if len(info.ProjectorInfo) != 0 { + opts.MultiModal = true + } + for k := range info.ModelInfo { + if strings.Contains(k, ".vision.") { + opts.MultiModal = true + break + } + } + + opts.ParentModel = info.Details.ParentModel + + if interactive { + if err := loadOrUnloadModel(cmd, &opts); err != nil { + return err + } + + for _, msg := range info.Messages { + switch msg.Role { + case "user": + fmt.Printf(">>> %s\n", msg.Content) + case "assistant": + state := &displayResponseState{} + displayResponse(msg.Content, opts.WordWrap, state) + fmt.Println() + fmt.Println() + } + } + + return generateInteractive(cmd, opts) + } + return generate(cmd, opts) +} + +func PushHandler(cmd *cobra.Command, args []string) error { + client, err := api.ClientFromEnvironment() + if err != nil { + return err + } + + insecure, err := cmd.Flags().GetBool("insecure") + if err != nil { + return err + } + + p := progress.NewProgress(os.Stderr) + defer p.Stop() + + bars := make(map[string]*progress.Bar) + var status string + var spinner *progress.Spinner + + fn := func(resp api.ProgressResponse) error { + if resp.Digest != "" { + if spinner != nil { + spinner.Stop() + } + + bar, ok := bars[resp.Digest] + if !ok { + bar = progress.NewBar(fmt.Sprintf("pushing %s...", resp.Digest[7:19]), resp.Total, resp.Completed) + bars[resp.Digest] = bar + p.Add(resp.Digest, bar) + } + + bar.Set(resp.Completed) + } else if status != resp.Status { + if spinner != nil { + spinner.Stop() + } + + status = resp.Status + spinner = progress.NewSpinner(status) + p.Add(status, spinner) + } + + return nil + } + + request := api.PushRequest{Name: args[0], Insecure: insecure} + + n := model.ParseName(args[0]) + if err := client.Push(cmd.Context(), &request, fn); err != nil { + if spinner != nil { + spinner.Stop() + } + if strings.Contains(err.Error(), "access denied") { + return errors.New("you are not authorized to push to this namespace, create the model under a namespace you own") + } + return err + } + + p.Stop() + spinner.Stop() + + destination := n.String() + if strings.HasSuffix(n.Host, ".ollama.ai") || strings.HasSuffix(n.Host, ".ollama.com") { + destination = "https://ollama.com/" + strings.TrimSuffix(n.DisplayShortest(), ":latest") + } + fmt.Printf("\nYou can find your model at:\n\n") + fmt.Printf("\t%s\n", destination) + + return nil +} + +func ListHandler(cmd *cobra.Command, args []string) error { + client, err := api.ClientFromEnvironment() + if err != nil { + return err + } + + models, err := client.List(cmd.Context()) + if err != nil { + return err + } + + var data [][]string + + for _, m := range models.Models { + if len(args) == 0 || strings.HasPrefix(strings.ToLower(m.Name), strings.ToLower(args[0])) { + data = 
append(data, []string{m.Name, m.Digest[:12], format.HumanBytes(m.Size), format.HumanTime(m.ModifiedAt, "Never")}) + } + } + + table := tablewriter.NewWriter(os.Stdout) + table.SetHeader([]string{"NAME", "ID", "SIZE", "MODIFIED"}) + table.SetHeaderAlignment(tablewriter.ALIGN_LEFT) + table.SetAlignment(tablewriter.ALIGN_LEFT) + table.SetHeaderLine(false) + table.SetBorder(false) + table.SetNoWhiteSpace(true) + table.SetTablePadding(" ") + table.AppendBulk(data) + table.Render() + + return nil +} + +func ListRunningHandler(cmd *cobra.Command, args []string) error { + client, err := api.ClientFromEnvironment() + if err != nil { + return err + } + + models, err := client.ListRunning(cmd.Context()) + if err != nil { + return err + } + + var data [][]string + + for _, m := range models.Models { + if len(args) == 0 || strings.HasPrefix(m.Name, args[0]) { + var procStr string + switch { + case m.SizeVRAM == 0: + procStr = "100% CPU" + case m.SizeVRAM == m.Size: + procStr = "100% GPU" + case m.SizeVRAM > m.Size || m.Size == 0: + procStr = "Unknown" + default: + sizeCPU := m.Size - m.SizeVRAM + cpuPercent := math.Round(float64(sizeCPU) / float64(m.Size) * 100) + procStr = fmt.Sprintf("%d%%/%d%% CPU/GPU", int(cpuPercent), int(100-cpuPercent)) + } + + var until string + delta := time.Since(m.ExpiresAt) + if delta > 0 { + until = "Stopping..." + } else { + until = format.HumanTime(m.ExpiresAt, "Never") + } + data = append(data, []string{m.Name, m.Digest[:12], format.HumanBytes(m.Size), procStr, until}) + } + } + + table := tablewriter.NewWriter(os.Stdout) + table.SetHeader([]string{"NAME", "ID", "SIZE", "PROCESSOR", "UNTIL"}) + table.SetHeaderAlignment(tablewriter.ALIGN_LEFT) + table.SetAlignment(tablewriter.ALIGN_LEFT) + table.SetHeaderLine(false) + table.SetBorder(false) + table.SetNoWhiteSpace(true) + table.SetTablePadding(" ") + table.AppendBulk(data) + table.Render() + + return nil +} + +func DeleteHandler(cmd *cobra.Command, args []string) error { + client, err := api.ClientFromEnvironment() + if err != nil { + return err + } + + // Unload the model if it's running before deletion + opts := &runOptions{ + Model: args[0], + KeepAlive: &api.Duration{Duration: 0}, + } + if err := loadOrUnloadModel(cmd, opts); err != nil { + if !strings.Contains(err.Error(), "not found") { + return fmt.Errorf("unable to stop existing running model \"%s\": %s", args[0], err) + } + } + + for _, name := range args { + req := api.DeleteRequest{Name: name} + if err := client.Delete(cmd.Context(), &req); err != nil { + return err + } + fmt.Printf("deleted '%s'\n", name) + } + return nil +} + +func ShowHandler(cmd *cobra.Command, args []string) error { + client, err := api.ClientFromEnvironment() + if err != nil { + return err + } + + license, errLicense := cmd.Flags().GetBool("license") + modelfile, errModelfile := cmd.Flags().GetBool("modelfile") + parameters, errParams := cmd.Flags().GetBool("parameters") + system, errSystem := cmd.Flags().GetBool("system") + template, errTemplate := cmd.Flags().GetBool("template") + verbose, errVerbose := cmd.Flags().GetBool("verbose") + + for _, boolErr := range []error{errLicense, errModelfile, errParams, errSystem, errTemplate, errVerbose} { + if boolErr != nil { + return errors.New("error retrieving flags") + } + } + + flagsSet := 0 + showType := "" + + if license { + flagsSet++ + showType = "license" + } + + if modelfile { + flagsSet++ + showType = "modelfile" + } + + if parameters { + flagsSet++ + showType = "parameters" + } + + if system { + flagsSet++ + showType = "system" + } + 
+ if template { + flagsSet++ + showType = "template" + } + + if flagsSet > 1 { + return errors.New("only one of '--license', '--modelfile', '--parameters', '--system', or '--template' can be specified") + } + + req := api.ShowRequest{Name: args[0], Verbose: verbose} + resp, err := client.Show(cmd.Context(), &req) + if err != nil { + return err + } + + if flagsSet == 1 { + switch showType { + case "license": + fmt.Println(resp.License) + case "modelfile": + fmt.Println(resp.Modelfile) + case "parameters": + fmt.Println(resp.Parameters) + case "system": + fmt.Print(resp.System) + case "template": + fmt.Print(resp.Template) + } + + return nil + } + + return showInfo(resp, verbose, os.Stdout) +} + +func showInfo(resp *api.ShowResponse, verbose bool, w io.Writer) error { + tableRender := func(header string, rows func() [][]string) { + fmt.Fprintln(w, " ", header) + table := tablewriter.NewWriter(w) + table.SetAlignment(tablewriter.ALIGN_LEFT) + table.SetBorder(false) + table.SetNoWhiteSpace(true) + table.SetTablePadding(" ") + + switch header { + case "Template", "System", "License": + table.SetColWidth(100) + } + + table.AppendBulk(rows()) + table.Render() + fmt.Fprintln(w) + } + + tableRender("Model", func() (rows [][]string) { + if resp.ModelInfo != nil { + arch := resp.ModelInfo["general.architecture"].(string) + rows = append(rows, []string{"", "architecture", arch}) + rows = append(rows, []string{"", "parameters", format.HumanNumber(uint64(resp.ModelInfo["general.parameter_count"].(float64)))}) + rows = append(rows, []string{"", "context length", strconv.FormatFloat(resp.ModelInfo[fmt.Sprintf("%s.context_length", arch)].(float64), 'f', -1, 64)}) + rows = append(rows, []string{"", "embedding length", strconv.FormatFloat(resp.ModelInfo[fmt.Sprintf("%s.embedding_length", arch)].(float64), 'f', -1, 64)}) + } else { + rows = append(rows, []string{"", "architecture", resp.Details.Family}) + rows = append(rows, []string{"", "parameters", resp.Details.ParameterSize}) + } + rows = append(rows, []string{"", "quantization", resp.Details.QuantizationLevel}) + return + }) + + if len(resp.Capabilities) > 0 { + tableRender("Capabilities", func() (rows [][]string) { + for _, capability := range resp.Capabilities { + rows = append(rows, []string{"", capability.String()}) + } + return + }) + } + + if resp.ProjectorInfo != nil { + tableRender("Projector", func() (rows [][]string) { + arch := resp.ProjectorInfo["general.architecture"].(string) + rows = append(rows, []string{"", "architecture", arch}) + rows = append(rows, []string{"", "parameters", format.HumanNumber(uint64(resp.ProjectorInfo["general.parameter_count"].(float64)))}) + rows = append(rows, []string{"", "embedding length", strconv.FormatFloat(resp.ProjectorInfo[fmt.Sprintf("%s.vision.embedding_length", arch)].(float64), 'f', -1, 64)}) + rows = append(rows, []string{"", "dimensions", strconv.FormatFloat(resp.ProjectorInfo[fmt.Sprintf("%s.vision.projection_dim", arch)].(float64), 'f', -1, 64)}) + return + }) + } + + if resp.Parameters != "" { + tableRender("Parameters", func() (rows [][]string) { + scanner := bufio.NewScanner(strings.NewReader(resp.Parameters)) + for scanner.Scan() { + if text := scanner.Text(); text != "" { + rows = append(rows, append([]string{""}, strings.Fields(text)...)) + } + } + return + }) + } + + if resp.ModelInfo != nil && verbose { + tableRender("Metadata", func() (rows [][]string) { + keys := make([]string, 0, len(resp.ModelInfo)) + for k := range resp.ModelInfo { + keys = append(keys, k) + } + sort.Strings(keys) + + 
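+ // Render each metadata entry, truncating long array values so they fit in a narrow column.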
for _, k := range keys { + var v string + switch vData := resp.ModelInfo[k].(type) { + case bool: + v = fmt.Sprintf("%t", vData) + case string: + v = vData + case float64: + v = fmt.Sprintf("%g", vData) + case []any: + targetWidth := 10 // Small width where we are displaying the data in a column + + var itemsToShow int + totalWidth := 1 // Start with 1 for opening bracket + + // Find how many we can fit + for i := range vData { + itemStr := fmt.Sprintf("%v", vData[i]) + width := runewidth.StringWidth(itemStr) + + // Add separator width (", ") for all items except the first + if i > 0 { + width += 2 + } + + // Check if adding this item would exceed our width limit + if totalWidth+width > targetWidth && i > 0 { + break + } + + totalWidth += width + itemsToShow++ + } + + // Format the output + if itemsToShow < len(vData) { + v = fmt.Sprintf("%v", vData[:itemsToShow]) + v = strings.TrimSuffix(v, "]") + v += fmt.Sprintf(" ...+%d more]", len(vData)-itemsToShow) + } else { + v = fmt.Sprintf("%v", vData) + } + default: + v = fmt.Sprintf("%T", vData) + } + rows = append(rows, []string{"", k, v}) + } + return + }) + } + + if len(resp.Tensors) > 0 && verbose { + tableRender("Tensors", func() (rows [][]string) { + for _, t := range resp.Tensors { + rows = append(rows, []string{"", t.Name, t.Type, fmt.Sprint(t.Shape)}) + } + return + }) + } + + head := func(s string, n int) (rows [][]string) { + scanner := bufio.NewScanner(strings.NewReader(s)) + count := 0 + for scanner.Scan() { + text := strings.TrimSpace(scanner.Text()) + if text == "" { + continue + } + count++ + if n < 0 || count <= n { + rows = append(rows, []string{"", text}) + } + } + if n >= 0 && count > n { + rows = append(rows, []string{"", "..."}) + } + return + } + + if resp.System != "" { + tableRender("System", func() [][]string { + return head(resp.System, 2) + }) + } + + if resp.License != "" { + tableRender("License", func() [][]string { + return head(resp.License, 2) + }) + } + + return nil +} + +func CopyHandler(cmd *cobra.Command, args []string) error { + client, err := api.ClientFromEnvironment() + if err != nil { + return err + } + + req := api.CopyRequest{Source: args[0], Destination: args[1]} + if err := client.Copy(cmd.Context(), &req); err != nil { + return err + } + fmt.Printf("copied '%s' to '%s'\n", args[0], args[1]) + return nil +} + +func PullHandler(cmd *cobra.Command, args []string) error { + insecure, err := cmd.Flags().GetBool("insecure") + if err != nil { + return err + } + + client, err := api.ClientFromEnvironment() + if err != nil { + return err + } + + p := progress.NewProgress(os.Stderr) + defer p.Stop() + + bars := make(map[string]*progress.Bar) + + var status string + var spinner *progress.Spinner + + fn := func(resp api.ProgressResponse) error { + if resp.Digest != "" { + if resp.Completed == 0 { + // This is the initial status update for the + // layer, which the server sends before + // beginning the download, for clients to + // compute total size and prepare for + // downloads, if needed. + // + // Skipping this here to avoid showing a 0% + // progress bar, which *should* clue the user + // into the fact that many things are being + // downloaded and that the current active + // download is not that last. However, in rare + // cases it seems to be triggering to some, and + // it isn't worth explaining, so just ignore + // and regress to the old UI that keeps giving + // you the "But wait, there is more!" after + // each "100% done" bar, which is "better." 
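+ // In short: skip this zero-progress update and only create a bar once
+ // bytes for the layer actually start arriving.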
+ return nil + } + + if spinner != nil { + spinner.Stop() + } + + bar, ok := bars[resp.Digest] + if !ok { + name, isDigest := strings.CutPrefix(resp.Digest, "sha256:") + name = strings.TrimSpace(name) + if isDigest { + name = name[:min(12, len(name))] + } + bar = progress.NewBar(fmt.Sprintf("pulling %s:", name), resp.Total, resp.Completed) + bars[resp.Digest] = bar + p.Add(resp.Digest, bar) + } + + bar.Set(resp.Completed) + } else if status != resp.Status { + if spinner != nil { + spinner.Stop() + } + + status = resp.Status + spinner = progress.NewSpinner(status) + p.Add(status, spinner) + } + + return nil + } + + request := api.PullRequest{Name: args[0], Insecure: insecure} + return client.Pull(cmd.Context(), &request, fn) +} + +type generateContextKey string + +type runOptions struct { + Model string + ParentModel string + Prompt string + Messages []api.Message + WordWrap bool + Format string + System string + Images []api.ImageData + Options map[string]any + MultiModal bool + KeepAlive *api.Duration +} + +type displayResponseState struct { + lineLength int + wordBuffer string +} + +func displayResponse(content string, wordWrap bool, state *displayResponseState) { + termWidth, _, _ := term.GetSize(int(os.Stdout.Fd())) + if wordWrap && termWidth >= 10 { + for _, ch := range content { + if state.lineLength+1 > termWidth-5 { + if runewidth.StringWidth(state.wordBuffer) > termWidth-10 { + fmt.Printf("%s%c", state.wordBuffer, ch) + state.wordBuffer = "" + state.lineLength = 0 + continue + } + + // backtrack the length of the last word and clear to the end of the line + a := runewidth.StringWidth(state.wordBuffer) + if a > 0 { + fmt.Printf("\x1b[%dD", a) + } + fmt.Printf("\x1b[K\n") + fmt.Printf("%s%c", state.wordBuffer, ch) + chWidth := runewidth.RuneWidth(ch) + + state.lineLength = runewidth.StringWidth(state.wordBuffer) + chWidth + } else { + fmt.Print(string(ch)) + state.lineLength += runewidth.RuneWidth(ch) + if runewidth.RuneWidth(ch) >= 2 { + state.wordBuffer = "" + continue + } + + switch ch { + case ' ': + state.wordBuffer = "" + case '\n': + state.lineLength = 0 + default: + state.wordBuffer += string(ch) + } + } + } + } else { + fmt.Printf("%s%s", state.wordBuffer, content) + if len(state.wordBuffer) > 0 { + state.wordBuffer = "" + } + } +} + +func chat(cmd *cobra.Command, opts runOptions) (*api.Message, error) { + client, err := api.ClientFromEnvironment() + if err != nil { + return nil, err + } + + p := progress.NewProgress(os.Stderr) + defer p.StopAndClear() + + spinner := progress.NewSpinner("") + p.Add("", spinner) + + cancelCtx, cancel := context.WithCancel(cmd.Context()) + defer cancel() + + sigChan := make(chan os.Signal, 1) + signal.Notify(sigChan, syscall.SIGINT) + + go func() { + <-sigChan + cancel() + }() + + var state *displayResponseState = &displayResponseState{} + var latest api.ChatResponse + var fullResponse strings.Builder + var role string + + fn := func(response api.ChatResponse) error { + p.StopAndClear() + + latest = response + + role = response.Message.Role + content := response.Message.Content + fullResponse.WriteString(content) + + displayResponse(content, opts.WordWrap, state) + + return nil + } + + if opts.Format == "json" { + opts.Format = `"` + opts.Format + `"` + } + + req := &api.ChatRequest{ + Model: opts.Model, + Messages: opts.Messages, + Format: json.RawMessage(opts.Format), + Options: opts.Options, + } + + if opts.KeepAlive != nil { + req.KeepAlive = opts.KeepAlive + } + + if err := client.Chat(cancelCtx, req, fn); err != nil { + if 
errors.Is(err, context.Canceled) { + return nil, nil + } + return nil, err + } + + if len(opts.Messages) > 0 { + fmt.Println() + fmt.Println() + } + + verbose, err := cmd.Flags().GetBool("verbose") + if err != nil { + return nil, err + } + + if verbose { + latest.Summary() + } + + return &api.Message{Role: role, Content: fullResponse.String()}, nil +} + +func generate(cmd *cobra.Command, opts runOptions) error { + client, err := api.ClientFromEnvironment() + if err != nil { + return err + } + + p := progress.NewProgress(os.Stderr) + defer p.StopAndClear() + + spinner := progress.NewSpinner("") + p.Add("", spinner) + + var latest api.GenerateResponse + + generateContext, ok := cmd.Context().Value(generateContextKey("context")).([]int) + if !ok { + generateContext = []int{} + } + + ctx, cancel := context.WithCancel(cmd.Context()) + defer cancel() + + sigChan := make(chan os.Signal, 1) + signal.Notify(sigChan, syscall.SIGINT) + + go func() { + <-sigChan + cancel() + }() + + var state *displayResponseState = &displayResponseState{} + + fn := func(response api.GenerateResponse) error { + p.StopAndClear() + + latest = response + content := response.Response + + displayResponse(content, opts.WordWrap, state) + + return nil + } + + if opts.MultiModal { + opts.Prompt, opts.Images, err = extractFileData(opts.Prompt) + if err != nil { + return err + } + } + + if opts.Format == "json" { + opts.Format = `"` + opts.Format + `"` + } + + request := api.GenerateRequest{ + Model: opts.Model, + Prompt: opts.Prompt, + Context: generateContext, + Images: opts.Images, + Format: json.RawMessage(opts.Format), + System: opts.System, + Options: opts.Options, + KeepAlive: opts.KeepAlive, + } + + if err := client.Generate(ctx, &request, fn); err != nil { + if errors.Is(err, context.Canceled) { + return nil + } + return err + } + + if opts.Prompt != "" { + fmt.Println() + fmt.Println() + } + + if !latest.Done { + return nil + } + + verbose, err := cmd.Flags().GetBool("verbose") + if err != nil { + return err + } + + if verbose { + latest.Summary() + } + + ctx = context.WithValue(cmd.Context(), generateContextKey("context"), latest.Context) + cmd.SetContext(ctx) + + return nil +} + +func RunServer(_ *cobra.Command, _ []string) error { + if err := initializeKeypair(); err != nil { + return err + } + + ln, err := net.Listen("tcp", envconfig.Host().Host) + if err != nil { + return err + } + + err = server.Serve(ln) + if errors.Is(err, http.ErrServerClosed) { + return nil + } + + return err +} + +func initializeKeypair() error { + home, err := os.UserHomeDir() + if err != nil { + return err + } + + privKeyPath := filepath.Join(home, ".ollama", "id_ed25519") + pubKeyPath := filepath.Join(home, ".ollama", "id_ed25519.pub") + + _, err = os.Stat(privKeyPath) + if os.IsNotExist(err) { + fmt.Printf("Couldn't find '%s'. 
Generating new private key.\n", privKeyPath) + cryptoPublicKey, cryptoPrivateKey, err := ed25519.GenerateKey(rand.Reader) + if err != nil { + return err + } + + privateKeyBytes, err := ssh.MarshalPrivateKey(cryptoPrivateKey, "") + if err != nil { + return err + } + + if err := os.MkdirAll(filepath.Dir(privKeyPath), 0o755); err != nil { + return fmt.Errorf("could not create directory %w", err) + } + + if err := os.WriteFile(privKeyPath, pem.EncodeToMemory(privateKeyBytes), 0o600); err != nil { + return err + } + + sshPublicKey, err := ssh.NewPublicKey(cryptoPublicKey) + if err != nil { + return err + } + + publicKeyBytes := ssh.MarshalAuthorizedKey(sshPublicKey) + + if err := os.WriteFile(pubKeyPath, publicKeyBytes, 0o644); err != nil { + return err + } + + fmt.Printf("Your new public key is: \n\n%s\n", publicKeyBytes) + } + return nil +} + +func checkServerHeartbeat(cmd *cobra.Command, _ []string) error { + client, err := api.ClientFromEnvironment() + if err != nil { + return err + } + if err := client.Heartbeat(cmd.Context()); err != nil { + if !strings.Contains(err.Error(), " refused") { + return err + } + if err := startApp(cmd.Context(), client); err != nil { + return errors.New("could not connect to ollama app, is it running?") + } + } + return nil +} + +func versionHandler(cmd *cobra.Command, _ []string) { + client, err := api.ClientFromEnvironment() + if err != nil { + return + } + + serverVersion, err := client.Version(cmd.Context()) + if err != nil { + fmt.Println("Warning: could not connect to a running Ollama instance") + } + + if serverVersion != "" { + fmt.Printf("ollama version is %s\n", serverVersion) + } + + if serverVersion != version.Version { + fmt.Printf("Warning: client version is %s\n", version.Version) + } +} + +func appendEnvDocs(cmd *cobra.Command, envs []envconfig.EnvVar) { + if len(envs) == 0 { + return + } + + envUsage := ` +Environment Variables: +` + for _, e := range envs { + envUsage += fmt.Sprintf(" %-24s %s\n", e.Name, e.Description) + } + + cmd.SetUsageTemplate(cmd.UsageTemplate() + envUsage) +} + +func NewCLI() *cobra.Command { + log.SetFlags(log.LstdFlags | log.Lshortfile) + cobra.EnableCommandSorting = false + + if runtime.GOOS == "windows" && term.IsTerminal(int(os.Stdout.Fd())) { + console.ConsoleFromFile(os.Stdin) //nolint:errcheck + } + + rootCmd := &cobra.Command{ + Use: "ollama", + Short: "Large language model runner", + SilenceUsage: true, + SilenceErrors: true, + CompletionOptions: cobra.CompletionOptions{ + DisableDefaultCmd: true, + }, + Run: func(cmd *cobra.Command, args []string) { + if version, _ := cmd.Flags().GetBool("version"); version { + versionHandler(cmd, args) + return + } + + cmd.Print(cmd.UsageString()) + }, + } + + rootCmd.Flags().BoolP("version", "v", false, "Show version information") + + createCmd := &cobra.Command{ + Use: "create MODEL", + Short: "Create a model from a Modelfile", + Args: cobra.ExactArgs(1), + PreRunE: checkServerHeartbeat, + RunE: CreateHandler, + } + + createCmd.Flags().StringP("file", "f", "", "Name of the Modelfile (default \"Modelfile\"") + createCmd.Flags().StringP("quantize", "q", "", "Quantize model to this level (e.g. 
q4_K_M)") + + showCmd := &cobra.Command{ + Use: "show MODEL", + Short: "Show information for a model", + Args: cobra.ExactArgs(1), + PreRunE: checkServerHeartbeat, + RunE: ShowHandler, + } + + showCmd.Flags().Bool("license", false, "Show license of a model") + showCmd.Flags().Bool("modelfile", false, "Show Modelfile of a model") + showCmd.Flags().Bool("parameters", false, "Show parameters of a model") + showCmd.Flags().Bool("template", false, "Show template of a model") + showCmd.Flags().Bool("system", false, "Show system message of a model") + showCmd.Flags().BoolP("verbose", "v", false, "Show detailed model information") + + runCmd := &cobra.Command{ + Use: "run MODEL [PROMPT]", + Short: "Run a model", + Args: cobra.MinimumNArgs(1), + PreRunE: checkServerHeartbeat, + RunE: RunHandler, + } + + runCmd.Flags().String("keepalive", "", "Duration to keep a model loaded (e.g. 5m)") + runCmd.Flags().Bool("verbose", false, "Show timings for response") + runCmd.Flags().Bool("insecure", false, "Use an insecure registry") + runCmd.Flags().Bool("nowordwrap", false, "Don't wrap words to the next line automatically") + runCmd.Flags().String("format", "", "Response format (e.g. json)") + + stopCmd := &cobra.Command{ + Use: "stop MODEL", + Short: "Stop a running model", + Args: cobra.ExactArgs(1), + PreRunE: checkServerHeartbeat, + RunE: StopHandler, + } + + serveCmd := &cobra.Command{ + Use: "serve", + Aliases: []string{"start"}, + Short: "Start ollama", + Args: cobra.ExactArgs(0), + RunE: RunServer, + } + + pullCmd := &cobra.Command{ + Use: "pull MODEL", + Short: "Pull a model from a registry", + Args: cobra.ExactArgs(1), + PreRunE: checkServerHeartbeat, + RunE: PullHandler, + } + + pullCmd.Flags().Bool("insecure", false, "Use an insecure registry") + + pushCmd := &cobra.Command{ + Use: "push MODEL", + Short: "Push a model to a registry", + Args: cobra.ExactArgs(1), + PreRunE: checkServerHeartbeat, + RunE: PushHandler, + } + + pushCmd.Flags().Bool("insecure", false, "Use an insecure registry") + + listCmd := &cobra.Command{ + Use: "list", + Aliases: []string{"ls"}, + Short: "List models", + PreRunE: checkServerHeartbeat, + RunE: ListHandler, + } + + psCmd := &cobra.Command{ + Use: "ps", + Short: "List running models", + PreRunE: checkServerHeartbeat, + RunE: ListRunningHandler, + } + + copyCmd := &cobra.Command{ + Use: "cp SOURCE DESTINATION", + Short: "Copy a model", + Args: cobra.ExactArgs(2), + PreRunE: checkServerHeartbeat, + RunE: CopyHandler, + } + + deleteCmd := &cobra.Command{ + Use: "rm MODEL [MODEL...]", + Short: "Remove a model", + Args: cobra.MinimumNArgs(1), + PreRunE: checkServerHeartbeat, + RunE: DeleteHandler, + } + + runnerCmd := &cobra.Command{ + Use: "runner", + Hidden: true, + RunE: func(cmd *cobra.Command, args []string) error { + return runner.Execute(os.Args[1:]) + }, + FParseErrWhitelist: cobra.FParseErrWhitelist{UnknownFlags: true}, + } + runnerCmd.SetHelpFunc(func(cmd *cobra.Command, args []string) { + _ = runner.Execute(args[1:]) + }) + + envVars := envconfig.AsMap() + + envs := []envconfig.EnvVar{envVars["OLLAMA_HOST"]} + + for _, cmd := range []*cobra.Command{ + createCmd, + showCmd, + runCmd, + stopCmd, + pullCmd, + pushCmd, + listCmd, + psCmd, + copyCmd, + deleteCmd, + serveCmd, + } { + switch cmd { + case runCmd: + appendEnvDocs(cmd, []envconfig.EnvVar{envVars["OLLAMA_HOST"], envVars["OLLAMA_NOHISTORY"]}) + case serveCmd: + appendEnvDocs(cmd, []envconfig.EnvVar{ + envVars["OLLAMA_DEBUG"], + envVars["OLLAMA_HOST"], + envVars["OLLAMA_KEEP_ALIVE"], + 
envVars["OLLAMA_MAX_LOADED_MODELS"], + envVars["OLLAMA_MAX_QUEUE"], + envVars["OLLAMA_MODELS"], + envVars["OLLAMA_NUM_PARALLEL"], + envVars["OLLAMA_NOPRUNE"], + envVars["OLLAMA_ORIGINS"], + envVars["OLLAMA_SCHED_SPREAD"], + envVars["OLLAMA_FLASH_ATTENTION"], + envVars["OLLAMA_KV_CACHE_TYPE"], + envVars["OLLAMA_LLM_LIBRARY"], + envVars["OLLAMA_GPU_OVERHEAD"], + envVars["OLLAMA_LOAD_TIMEOUT"], + }) + default: + appendEnvDocs(cmd, envs) + } + } + + rootCmd.AddCommand( + serveCmd, + createCmd, + showCmd, + runCmd, + stopCmd, + pullCmd, + pushCmd, + listCmd, + psCmd, + copyCmd, + deleteCmd, + runnerCmd, + ) + + return rootCmd +} diff --git a/cmd/cmd_test.go b/cmd/cmd_test.go new file mode 100644 index 0000000..cf5fe7c --- /dev/null +++ b/cmd/cmd_test.go @@ -0,0 +1,917 @@ +package cmd + +import ( + "bytes" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "os" + "strings" + "testing" + "time" + + "github.com/google/go-cmp/cmp" + "github.com/spf13/cobra" + + "github.com/ollama/ollama/api" + "github.com/ollama/ollama/types/model" +) + +func TestShowInfo(t *testing.T) { + t.Run("bare details", func(t *testing.T) { + var b bytes.Buffer + if err := showInfo(&api.ShowResponse{ + Details: api.ModelDetails{ + Family: "test", + ParameterSize: "7B", + QuantizationLevel: "FP16", + }, + }, false, &b); err != nil { + t.Fatal(err) + } + + expect := ` Model + architecture test + parameters 7B + quantization FP16 + +` + + if diff := cmp.Diff(expect, b.String()); diff != "" { + t.Errorf("unexpected output (-want +got):\n%s", diff) + } + }) + + t.Run("bare model info", func(t *testing.T) { + var b bytes.Buffer + if err := showInfo(&api.ShowResponse{ + ModelInfo: map[string]any{ + "general.architecture": "test", + "general.parameter_count": float64(7_000_000_000), + "test.context_length": float64(0), + "test.embedding_length": float64(0), + }, + Details: api.ModelDetails{ + Family: "test", + ParameterSize: "7B", + QuantizationLevel: "FP16", + }, + }, false, &b); err != nil { + t.Fatal(err) + } + + expect := ` Model + architecture test + parameters 7B + context length 0 + embedding length 0 + quantization FP16 + +` + if diff := cmp.Diff(expect, b.String()); diff != "" { + t.Errorf("unexpected output (-want +got):\n%s", diff) + } + }) + + t.Run("verbose model", func(t *testing.T) { + var b bytes.Buffer + if err := showInfo(&api.ShowResponse{ + Details: api.ModelDetails{ + Family: "test", + ParameterSize: "8B", + QuantizationLevel: "FP16", + }, + Parameters: ` + stop up`, + ModelInfo: map[string]any{ + "general.architecture": "test", + "general.parameter_count": float64(8_000_000_000), + "some.true_bool": true, + "some.false_bool": false, + "test.context_length": float64(1000), + "test.embedding_length": float64(11434), + }, + Tensors: []api.Tensor{ + {Name: "blk.0.attn_k.weight", Type: "BF16", Shape: []uint64{42, 3117}}, + {Name: "blk.0.attn_q.weight", Type: "FP16", Shape: []uint64{3117, 42}}, + }, + }, true, &b); err != nil { + t.Fatal(err) + } + + expect := ` Model + architecture test + parameters 8B + context length 1000 + embedding length 11434 + quantization FP16 + + Parameters + stop up + + Metadata + general.architecture test + general.parameter_count 8e+09 + some.false_bool false + some.true_bool true + test.context_length 1000 + test.embedding_length 11434 + + Tensors + blk.0.attn_k.weight BF16 [42 3117] + blk.0.attn_q.weight FP16 [3117 42] + +` + if diff := cmp.Diff(expect, b.String()); diff != "" { + t.Errorf("unexpected output (-want +got):\n%s", diff) + } + }) + + t.Run("parameters", func(t 
*testing.T) { + var b bytes.Buffer + if err := showInfo(&api.ShowResponse{ + Details: api.ModelDetails{ + Family: "test", + ParameterSize: "7B", + QuantizationLevel: "FP16", + }, + Parameters: ` + stop never + stop gonna + stop give + stop you + stop up + temperature 99`, + }, false, &b); err != nil { + t.Fatal(err) + } + + expect := ` Model + architecture test + parameters 7B + quantization FP16 + + Parameters + stop never + stop gonna + stop give + stop you + stop up + temperature 99 + +` + if diff := cmp.Diff(expect, b.String()); diff != "" { + t.Errorf("unexpected output (-want +got):\n%s", diff) + } + }) + + t.Run("project info", func(t *testing.T) { + var b bytes.Buffer + if err := showInfo(&api.ShowResponse{ + Details: api.ModelDetails{ + Family: "test", + ParameterSize: "7B", + QuantizationLevel: "FP16", + }, + ProjectorInfo: map[string]any{ + "general.architecture": "clip", + "general.parameter_count": float64(133_700_000), + "clip.vision.embedding_length": float64(0), + "clip.vision.projection_dim": float64(0), + }, + }, false, &b); err != nil { + t.Fatal(err) + } + + expect := ` Model + architecture test + parameters 7B + quantization FP16 + + Projector + architecture clip + parameters 133.70M + embedding length 0 + dimensions 0 + +` + if diff := cmp.Diff(expect, b.String()); diff != "" { + t.Errorf("unexpected output (-want +got):\n%s", diff) + } + }) + + t.Run("system", func(t *testing.T) { + var b bytes.Buffer + if err := showInfo(&api.ShowResponse{ + Details: api.ModelDetails{ + Family: "test", + ParameterSize: "7B", + QuantizationLevel: "FP16", + }, + System: `You are a pirate! +Ahoy, matey! +Weigh anchor! + `, + }, false, &b); err != nil { + t.Fatal(err) + } + + expect := ` Model + architecture test + parameters 7B + quantization FP16 + + System + You are a pirate! + Ahoy, matey! + ... 
+ +` + if diff := cmp.Diff(expect, b.String()); diff != "" { + t.Errorf("unexpected output (-want +got):\n%s", diff) + } + }) + + t.Run("license", func(t *testing.T) { + var b bytes.Buffer + license := "MIT License\nCopyright (c) Ollama\n" + if err := showInfo(&api.ShowResponse{ + Details: api.ModelDetails{ + Family: "test", + ParameterSize: "7B", + QuantizationLevel: "FP16", + }, + License: license, + }, false, &b); err != nil { + t.Fatal(err) + } + + expect := ` Model + architecture test + parameters 7B + quantization FP16 + + License + MIT License + Copyright (c) Ollama + +` + if diff := cmp.Diff(expect, b.String()); diff != "" { + t.Errorf("unexpected output (-want +got):\n%s", diff) + } + }) + + t.Run("capabilities", func(t *testing.T) { + var b bytes.Buffer + if err := showInfo(&api.ShowResponse{ + Details: api.ModelDetails{ + Family: "test", + ParameterSize: "7B", + QuantizationLevel: "FP16", + }, + Capabilities: []model.Capability{model.CapabilityVision, model.CapabilityTools}, + }, false, &b); err != nil { + t.Fatal(err) + } + + expect := " Model\n" + + " architecture test \n" + + " parameters 7B \n" + + " quantization FP16 \n" + + "\n" + + " Capabilities\n" + + " vision \n" + + " tools \n" + + "\n" + + if diff := cmp.Diff(expect, b.String()); diff != "" { + t.Errorf("unexpected output (-want +got):\n%s", diff) + } + }) +} + +func TestDeleteHandler(t *testing.T) { + stopped := false + mockServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path == "/api/delete" && r.Method == http.MethodDelete { + var req api.DeleteRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + if req.Name == "test-model" { + w.WriteHeader(http.StatusOK) + } else { + w.WriteHeader(http.StatusNotFound) + } + return + } + if r.URL.Path == "/api/generate" && r.Method == http.MethodPost { + var req api.GenerateRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + if req.Model == "test-model" { + w.WriteHeader(http.StatusOK) + if err := json.NewEncoder(w).Encode(api.GenerateResponse{ + Done: true, + }); err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + } + stopped = true + return + } else { + w.WriteHeader(http.StatusNotFound) + if err := json.NewEncoder(w).Encode(api.GenerateResponse{ + Done: false, + }); err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + } + } + } + })) + + t.Setenv("OLLAMA_HOST", mockServer.URL) + t.Cleanup(mockServer.Close) + + cmd := &cobra.Command{} + cmd.SetContext(t.Context()) + if err := DeleteHandler(cmd, []string{"test-model"}); err != nil { + t.Fatalf("DeleteHandler failed: %v", err) + } + if !stopped { + t.Fatal("Model was not stopped before deletion") + } + + err := DeleteHandler(cmd, []string{"test-model-not-found"}) + if err == nil || !strings.Contains(err.Error(), "unable to stop existing running model \"test-model-not-found\"") { + t.Fatalf("DeleteHandler failed: expected error about stopping non-existent model, got %v", err) + } +} + +func TestGetModelfileName(t *testing.T) { + tests := []struct { + name string + modelfileName string + fileExists bool + expectedName string + expectedErr error + }{ + { + name: "no modelfile specified, no modelfile exists", + modelfileName: "", + fileExists: false, + expectedName: "", + expectedErr: os.ErrNotExist, + }, + { + name: "no modelfile specified, modelfile exists", + 
modelfileName: "", + fileExists: true, + expectedName: "Modelfile", + expectedErr: nil, + }, + { + name: "modelfile specified, no modelfile exists", + modelfileName: "crazyfile", + fileExists: false, + expectedName: "", + expectedErr: os.ErrNotExist, + }, + { + name: "modelfile specified, modelfile exists", + modelfileName: "anotherfile", + fileExists: true, + expectedName: "anotherfile", + expectedErr: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + cmd := &cobra.Command{ + Use: "fakecmd", + } + cmd.Flags().String("file", "", "path to modelfile") + + var expectedFilename string + + if tt.fileExists { + var fn string + if tt.modelfileName != "" { + fn = tt.modelfileName + } else { + fn = "Modelfile" + } + + tempFile, err := os.CreateTemp(t.TempDir(), fn) + if err != nil { + t.Fatalf("temp modelfile creation failed: %v", err) + } + defer tempFile.Close() + + expectedFilename = tempFile.Name() + err = cmd.Flags().Set("file", expectedFilename) + if err != nil { + t.Fatalf("couldn't set file flag: %v", err) + } + } else { + expectedFilename = tt.expectedName + if tt.modelfileName != "" { + err := cmd.Flags().Set("file", tt.modelfileName) + if err != nil { + t.Fatalf("couldn't set file flag: %v", err) + } + } + } + + actualFilename, actualErr := getModelfileName(cmd) + + if actualFilename != expectedFilename { + t.Errorf("expected filename: '%s' actual filename: '%s'", expectedFilename, actualFilename) + } + + if tt.expectedErr != os.ErrNotExist { + if actualErr != tt.expectedErr { + t.Errorf("expected err: %v actual err: %v", tt.expectedErr, actualErr) + } + } else { + if !os.IsNotExist(actualErr) { + t.Errorf("expected err: %v actual err: %v", tt.expectedErr, actualErr) + } + } + }) + } +} + +func TestPushHandler(t *testing.T) { + tests := []struct { + name string + modelName string + serverResponse map[string]func(w http.ResponseWriter, r *http.Request) + expectedError string + expectedOutput string + }{ + { + name: "successful push", + modelName: "test-model", + serverResponse: map[string]func(w http.ResponseWriter, r *http.Request){ + "/api/push": func(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPost { + t.Errorf("expected POST request, got %s", r.Method) + } + + var req api.PushRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + + if req.Name != "test-model" { + t.Errorf("expected model name 'test-model', got %s", req.Name) + } + + // Simulate progress updates + responses := []api.ProgressResponse{ + {Status: "preparing manifest"}, + {Digest: "sha256:abc123456789", Total: 100, Completed: 50}, + {Digest: "sha256:abc123456789", Total: 100, Completed: 100}, + } + + for _, resp := range responses { + if err := json.NewEncoder(w).Encode(resp); err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + w.(http.Flusher).Flush() + } + }, + }, + expectedOutput: "\nYou can find your model at:\n\n\thttps://ollama.com/test-model\n", + }, + { + name: "unauthorized push", + modelName: "unauthorized-model", + serverResponse: map[string]func(w http.ResponseWriter, r *http.Request){ + "/api/push": func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + err := json.NewEncoder(w).Encode(map[string]string{ + "error": "access denied", + }) + if err != nil { + t.Fatal(err) + } + }, + }, + expectedError: "you are not authorized to push to this 
namespace, create the model under a namespace you own", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + mockServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if handler, ok := tt.serverResponse[r.URL.Path]; ok { + handler(w, r) + return + } + http.Error(w, "not found", http.StatusNotFound) + })) + defer mockServer.Close() + + t.Setenv("OLLAMA_HOST", mockServer.URL) + + cmd := &cobra.Command{} + cmd.Flags().Bool("insecure", false, "") + cmd.SetContext(t.Context()) + + // Redirect stderr to capture progress output + oldStderr := os.Stderr + r, w, _ := os.Pipe() + os.Stderr = w + + // Capture stdout for the "Model pushed" message + oldStdout := os.Stdout + outR, outW, _ := os.Pipe() + os.Stdout = outW + + err := PushHandler(cmd, []string{tt.modelName}) + + // Restore stderr + w.Close() + os.Stderr = oldStderr + // drain the pipe + if _, err := io.ReadAll(r); err != nil { + t.Fatal(err) + } + + // Restore stdout and get output + outW.Close() + os.Stdout = oldStdout + stdout, _ := io.ReadAll(outR) + + if tt.expectedError == "" { + if err != nil { + t.Errorf("expected no error, got %v", err) + } + if tt.expectedOutput != "" { + if got := string(stdout); got != tt.expectedOutput { + t.Errorf("expected output %q, got %q", tt.expectedOutput, got) + } + } + } else { + if err == nil || !strings.Contains(err.Error(), tt.expectedError) { + t.Errorf("expected error containing %q, got %v", tt.expectedError, err) + } + } + }) + } +} + +func TestListHandler(t *testing.T) { + tests := []struct { + name string + args []string + serverResponse []api.ListModelResponse + expectedError string + expectedOutput string + }{ + { + name: "list all models", + args: []string{}, + serverResponse: []api.ListModelResponse{ + {Name: "model1", Digest: "sha256:abc123", Size: 1024, ModifiedAt: time.Now().Add(-24 * time.Hour)}, + {Name: "model2", Digest: "sha256:def456", Size: 2048, ModifiedAt: time.Now().Add(-48 * time.Hour)}, + }, + expectedOutput: "NAME ID SIZE MODIFIED \n" + + "model1 sha256:abc12 1.0 KB 24 hours ago \n" + + "model2 sha256:def45 2.0 KB 2 days ago \n", + }, + { + name: "filter models by prefix", + args: []string{"model1"}, + serverResponse: []api.ListModelResponse{ + {Name: "model1", Digest: "sha256:abc123", Size: 1024, ModifiedAt: time.Now().Add(-24 * time.Hour)}, + {Name: "model2", Digest: "sha256:def456", Size: 2048, ModifiedAt: time.Now().Add(-24 * time.Hour)}, + }, + expectedOutput: "NAME ID SIZE MODIFIED \n" + + "model1 sha256:abc12 1.0 KB 24 hours ago \n", + }, + { + name: "server error", + args: []string{}, + expectedError: "server error", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + mockServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path != "/api/tags" || r.Method != http.MethodGet { + t.Errorf("unexpected request to %s %s", r.Method, r.URL.Path) + http.Error(w, "not found", http.StatusNotFound) + return + } + + if tt.expectedError != "" { + http.Error(w, tt.expectedError, http.StatusInternalServerError) + return + } + + response := api.ListResponse{Models: tt.serverResponse} + if err := json.NewEncoder(w).Encode(response); err != nil { + t.Fatal(err) + } + })) + defer mockServer.Close() + + t.Setenv("OLLAMA_HOST", mockServer.URL) + + cmd := &cobra.Command{} + cmd.SetContext(t.Context()) + + // Capture stdout + oldStdout := os.Stdout + r, w, _ := os.Pipe() + os.Stdout = w + + err := ListHandler(cmd, tt.args) + + // Restore stdout 
and get output + w.Close() + os.Stdout = oldStdout + output, _ := io.ReadAll(r) + + if tt.expectedError == "" { + if err != nil { + t.Errorf("expected no error, got %v", err) + } + if got := string(output); got != tt.expectedOutput { + t.Errorf("expected output:\n%s\ngot:\n%s", tt.expectedOutput, got) + } + } else { + if err == nil || !strings.Contains(err.Error(), tt.expectedError) { + t.Errorf("expected error containing %q, got %v", tt.expectedError, err) + } + } + }) + } +} + +func TestCreateHandler(t *testing.T) { + tests := []struct { + name string + modelName string + modelFile string + serverResponse map[string]func(w http.ResponseWriter, r *http.Request) + expectedError string + expectedOutput string + }{ + { + name: "successful create", + modelName: "test-model", + modelFile: "FROM foo", + serverResponse: map[string]func(w http.ResponseWriter, r *http.Request){ + "/api/create": func(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPost { + t.Errorf("expected POST request, got %s", r.Method) + } + + req := api.CreateRequest{} + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + + if req.Model != "test-model" { + t.Errorf("expected model name 'test-model', got %s", req.Name) + } + + if req.From != "foo" { + t.Errorf("expected from 'foo', got %s", req.From) + } + + responses := []api.ProgressResponse{ + {Status: "using existing layer sha256:56bb8bd477a519ffa694fc449c2413c6f0e1d3b1c88fa7e3c9d88d3ae49d4dcb"}, + {Status: "writing manifest"}, + {Status: "success"}, + } + + for _, resp := range responses { + if err := json.NewEncoder(w).Encode(resp); err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + w.(http.Flusher).Flush() + } + }, + }, + expectedOutput: "", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + mockServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + handler, ok := tt.serverResponse[r.URL.Path] + if !ok { + t.Errorf("unexpected request to %s", r.URL.Path) + http.Error(w, "not found", http.StatusNotFound) + return + } + handler(w, r) + })) + t.Setenv("OLLAMA_HOST", mockServer.URL) + t.Cleanup(mockServer.Close) + tempFile, err := os.CreateTemp(t.TempDir(), "modelfile") + if err != nil { + t.Fatal(err) + } + defer os.Remove(tempFile.Name()) + + if _, err := tempFile.WriteString(tt.modelFile); err != nil { + t.Fatal(err) + } + if err := tempFile.Close(); err != nil { + t.Fatal(err) + } + + cmd := &cobra.Command{} + cmd.Flags().String("file", "", "") + if err := cmd.Flags().Set("file", tempFile.Name()); err != nil { + t.Fatal(err) + } + + cmd.Flags().Bool("insecure", false, "") + cmd.SetContext(t.Context()) + + // Redirect stderr to capture progress output + oldStderr := os.Stderr + r, w, _ := os.Pipe() + os.Stderr = w + + // Capture stdout for the "Model pushed" message + oldStdout := os.Stdout + outR, outW, _ := os.Pipe() + os.Stdout = outW + + err = CreateHandler(cmd, []string{tt.modelName}) + + // Restore stderr + w.Close() + os.Stderr = oldStderr + // drain the pipe + if _, err := io.ReadAll(r); err != nil { + t.Fatal(err) + } + + // Restore stdout and get output + outW.Close() + os.Stdout = oldStdout + stdout, _ := io.ReadAll(outR) + + if tt.expectedError == "" { + if err != nil { + t.Errorf("expected no error, got %v", err) + } + + if tt.expectedOutput != "" { + if got := string(stdout); got != tt.expectedOutput { + t.Errorf("expected output %q, got %q", 
tt.expectedOutput, got) + } + } + } + }) + } +} + +func TestNewCreateRequest(t *testing.T) { + tests := []struct { + name string + from string + opts runOptions + expected *api.CreateRequest + }{ + { + "basic test", + "newmodel", + runOptions{ + Model: "mymodel", + ParentModel: "", + Prompt: "You are a fun AI agent", + Messages: []api.Message{}, + WordWrap: true, + }, + &api.CreateRequest{ + From: "mymodel", + Model: "newmodel", + }, + }, + { + "parent model test", + "newmodel", + runOptions{ + Model: "mymodel", + ParentModel: "parentmodel", + Messages: []api.Message{}, + WordWrap: true, + }, + &api.CreateRequest{ + From: "parentmodel", + Model: "newmodel", + }, + }, + { + "parent model as filepath test", + "newmodel", + runOptions{ + Model: "mymodel", + ParentModel: "/some/file/like/etc/passwd", + Messages: []api.Message{}, + WordWrap: true, + }, + &api.CreateRequest{ + From: "mymodel", + Model: "newmodel", + }, + }, + { + "parent model as windows filepath test", + "newmodel", + runOptions{ + Model: "mymodel", + ParentModel: "D:\\some\\file\\like\\etc\\passwd", + Messages: []api.Message{}, + WordWrap: true, + }, + &api.CreateRequest{ + From: "mymodel", + Model: "newmodel", + }, + }, + { + "options test", + "newmodel", + runOptions{ + Model: "mymodel", + ParentModel: "parentmodel", + Options: map[string]any{ + "temperature": 1.0, + }, + }, + &api.CreateRequest{ + From: "parentmodel", + Model: "newmodel", + Parameters: map[string]any{ + "temperature": 1.0, + }, + }, + }, + { + "messages test", + "newmodel", + runOptions{ + Model: "mymodel", + ParentModel: "parentmodel", + System: "You are a fun AI agent", + Messages: []api.Message{ + { + Role: "user", + Content: "hello there!", + }, + { + Role: "assistant", + Content: "hello to you!", + }, + }, + WordWrap: true, + }, + &api.CreateRequest{ + From: "parentmodel", + Model: "newmodel", + System: "You are a fun AI agent", + Messages: []api.Message{ + { + Role: "user", + Content: "hello there!", + }, + { + Role: "assistant", + Content: "hello to you!", + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := NewCreateRequest(tt.from, tt.opts) + if !cmp.Equal(actual, tt.expected) { + t.Errorf("expected output %#v, got %#v", tt.expected, actual) + } + }) + } +} diff --git a/cmd/interactive.go b/cmd/interactive.go new file mode 100644 index 0000000..d7e6fbc --- /dev/null +++ b/cmd/interactive.go @@ -0,0 +1,584 @@ +package cmd + +import ( + "cmp" + "errors" + "fmt" + "io" + "net/http" + "os" + "path/filepath" + "regexp" + "slices" + "strings" + + "github.com/spf13/cobra" + + "github.com/ollama/ollama/api" + "github.com/ollama/ollama/envconfig" + "github.com/ollama/ollama/readline" + "github.com/ollama/ollama/types/errtypes" + "github.com/ollama/ollama/types/model" +) + +type MultilineState int + +const ( + MultilineNone MultilineState = iota + MultilinePrompt + MultilineSystem +) + +func generateInteractive(cmd *cobra.Command, opts runOptions) error { + usage := func() { + fmt.Fprintln(os.Stderr, "Available Commands:") + fmt.Fprintln(os.Stderr, " /set Set session variables") + fmt.Fprintln(os.Stderr, " /show Show model information") + fmt.Fprintln(os.Stderr, " /load Load a session or model") + fmt.Fprintln(os.Stderr, " /save Save your current session") + fmt.Fprintln(os.Stderr, " /clear Clear session context") + fmt.Fprintln(os.Stderr, " /bye Exit") + fmt.Fprintln(os.Stderr, " /?, /help Help for a command") + fmt.Fprintln(os.Stderr, " /? 
shortcuts Help for keyboard shortcuts") + fmt.Fprintln(os.Stderr, "") + fmt.Fprintln(os.Stderr, "Use \"\"\" to begin a multi-line message.") + + if opts.MultiModal { + fmt.Fprintf(os.Stderr, "Use %s to include .jpg, .png, or .webp images.\n", filepath.FromSlash("/path/to/file")) + } + + fmt.Fprintln(os.Stderr, "") + } + + usageSet := func() { + fmt.Fprintln(os.Stderr, "Available Commands:") + fmt.Fprintln(os.Stderr, " /set parameter ... Set a parameter") + fmt.Fprintln(os.Stderr, " /set system Set system message") + fmt.Fprintln(os.Stderr, " /set history Enable history") + fmt.Fprintln(os.Stderr, " /set nohistory Disable history") + fmt.Fprintln(os.Stderr, " /set wordwrap Enable wordwrap") + fmt.Fprintln(os.Stderr, " /set nowordwrap Disable wordwrap") + fmt.Fprintln(os.Stderr, " /set format json Enable JSON mode") + fmt.Fprintln(os.Stderr, " /set noformat Disable formatting") + fmt.Fprintln(os.Stderr, " /set verbose Show LLM stats") + fmt.Fprintln(os.Stderr, " /set quiet Disable LLM stats") + fmt.Fprintln(os.Stderr, "") + } + + usageShortcuts := func() { + fmt.Fprintln(os.Stderr, "Available keyboard shortcuts:") + fmt.Fprintln(os.Stderr, " Ctrl + a Move to the beginning of the line (Home)") + fmt.Fprintln(os.Stderr, " Ctrl + e Move to the end of the line (End)") + fmt.Fprintln(os.Stderr, " Alt + b Move back (left) one word") + fmt.Fprintln(os.Stderr, " Alt + f Move forward (right) one word") + fmt.Fprintln(os.Stderr, " Ctrl + k Delete the sentence after the cursor") + fmt.Fprintln(os.Stderr, " Ctrl + u Delete the sentence before the cursor") + fmt.Fprintln(os.Stderr, " Ctrl + w Delete the word before the cursor") + fmt.Fprintln(os.Stderr, "") + fmt.Fprintln(os.Stderr, " Ctrl + l Clear the screen") + fmt.Fprintln(os.Stderr, " Ctrl + c Stop the model from responding") + fmt.Fprintln(os.Stderr, " Ctrl + d Exit ollama (/bye)") + fmt.Fprintln(os.Stderr, "") + } + + usageShow := func() { + fmt.Fprintln(os.Stderr, "Available Commands:") + fmt.Fprintln(os.Stderr, " /show info Show details for this model") + fmt.Fprintln(os.Stderr, " /show license Show model license") + fmt.Fprintln(os.Stderr, " /show modelfile Show Modelfile for this model") + fmt.Fprintln(os.Stderr, " /show parameters Show parameters for this model") + fmt.Fprintln(os.Stderr, " /show system Show system message") + fmt.Fprintln(os.Stderr, " /show template Show prompt template") + fmt.Fprintln(os.Stderr, "") + } + + // only list out the most common parameters + usageParameters := func() { + fmt.Fprintln(os.Stderr, "Available Parameters:") + fmt.Fprintln(os.Stderr, " /set parameter seed Random number seed") + fmt.Fprintln(os.Stderr, " /set parameter num_predict Max number of tokens to predict") + fmt.Fprintln(os.Stderr, " /set parameter top_k Pick from top k num of tokens") + fmt.Fprintln(os.Stderr, " /set parameter top_p Pick token based on sum of probabilities") + fmt.Fprintln(os.Stderr, " /set parameter min_p Pick token based on top token probability * min_p") + fmt.Fprintln(os.Stderr, " /set parameter num_ctx Set the context size") + fmt.Fprintln(os.Stderr, " /set parameter temperature Set creativity level") + fmt.Fprintln(os.Stderr, " /set parameter repeat_penalty How strongly to penalize repetitions") + fmt.Fprintln(os.Stderr, " /set parameter repeat_last_n Set how far back to look for repetitions") + fmt.Fprintln(os.Stderr, " /set parameter num_gpu The number of layers to send to the GPU") + fmt.Fprintln(os.Stderr, " /set parameter stop ... 
Set the stop parameters") + fmt.Fprintln(os.Stderr, "") + } + + scanner, err := readline.New(readline.Prompt{ + Prompt: ">>> ", + AltPrompt: "... ", + Placeholder: "Send a message (/? for help)", + AltPlaceholder: `Use """ to end multi-line input`, + }) + if err != nil { + return err + } + + if envconfig.NoHistory() { + scanner.HistoryDisable() + } + + fmt.Print(readline.StartBracketedPaste) + defer fmt.Printf(readline.EndBracketedPaste) + + var sb strings.Builder + var multiline MultilineState + + for { + line, err := scanner.Readline() + switch { + case errors.Is(err, io.EOF): + fmt.Println() + return nil + case errors.Is(err, readline.ErrInterrupt): + if line == "" { + fmt.Println("\nUse Ctrl + d or /bye to exit.") + } + + scanner.Prompt.UseAlt = false + sb.Reset() + + continue + case err != nil: + return err + } + + switch { + case multiline != MultilineNone: + // check if there's a multiline terminating string + before, ok := strings.CutSuffix(line, `"""`) + sb.WriteString(before) + if !ok { + fmt.Fprintln(&sb) + continue + } + + switch multiline { + case MultilineSystem: + opts.System = sb.String() + opts.Messages = append(opts.Messages, api.Message{Role: "system", Content: opts.System}) + fmt.Println("Set system message.") + sb.Reset() + } + + multiline = MultilineNone + scanner.Prompt.UseAlt = false + case strings.HasPrefix(line, `"""`): + line := strings.TrimPrefix(line, `"""`) + line, ok := strings.CutSuffix(line, `"""`) + sb.WriteString(line) + if !ok { + // no multiline terminating string; need more input + fmt.Fprintln(&sb) + multiline = MultilinePrompt + scanner.Prompt.UseAlt = true + } + case scanner.Pasting: + fmt.Fprintln(&sb, line) + continue + case strings.HasPrefix(line, "/list"): + args := strings.Fields(line) + if err := ListHandler(cmd, args[1:]); err != nil { + return err + } + case strings.HasPrefix(line, "/load"): + args := strings.Fields(line) + if len(args) != 2 { + fmt.Println("Usage:\n /load ") + continue + } + opts.Model = args[1] + opts.Messages = []api.Message{} + fmt.Printf("Loading model '%s'\n", opts.Model) + if err := loadOrUnloadModel(cmd, &opts); err != nil { + if strings.Contains(err.Error(), "not found") { + fmt.Printf("error: %v\n", err) + continue + } + return err + } + continue + case strings.HasPrefix(line, "/save"): + args := strings.Fields(line) + if len(args) != 2 { + fmt.Println("Usage:\n /save ") + continue + } + + client, err := api.ClientFromEnvironment() + if err != nil { + fmt.Println("error: couldn't connect to ollama server") + return err + } + + req := NewCreateRequest(args[1], opts) + fn := func(resp api.ProgressResponse) error { return nil } + err = client.Create(cmd.Context(), req, fn) + if err != nil { + if strings.Contains(err.Error(), errtypes.InvalidModelNameErrMsg) { + fmt.Printf("error: The model name '%s' is invalid\n", args[1]) + continue + } + return err + } + fmt.Printf("Created new model '%s'\n", args[1]) + continue + case strings.HasPrefix(line, "/clear"): + opts.Messages = []api.Message{} + if opts.System != "" { + newMessage := api.Message{Role: "system", Content: opts.System} + opts.Messages = append(opts.Messages, newMessage) + } + fmt.Println("Cleared session context") + continue + case strings.HasPrefix(line, "/set"): + args := strings.Fields(line) + if len(args) > 1 { + switch args[1] { + case "history": + scanner.HistoryEnable() + case "nohistory": + scanner.HistoryDisable() + case "wordwrap": + opts.WordWrap = true + fmt.Println("Set 'wordwrap' mode.") + case "nowordwrap": + opts.WordWrap = false + 
fmt.Println("Set 'nowordwrap' mode.") + case "verbose": + if err := cmd.Flags().Set("verbose", "true"); err != nil { + return err + } + fmt.Println("Set 'verbose' mode.") + case "quiet": + if err := cmd.Flags().Set("verbose", "false"); err != nil { + return err + } + fmt.Println("Set 'quiet' mode.") + case "format": + if len(args) < 3 || args[2] != "json" { + fmt.Println("Invalid or missing format. For 'json' mode use '/set format json'") + } else { + opts.Format = args[2] + fmt.Printf("Set format to '%s' mode.\n", args[2]) + } + case "noformat": + opts.Format = "" + fmt.Println("Disabled format.") + case "parameter": + if len(args) < 4 { + usageParameters() + continue + } + params := args[3:] + fp, err := api.FormatParams(map[string][]string{args[2]: params}) + if err != nil { + fmt.Printf("Couldn't set parameter: %q\n", err) + continue + } + fmt.Printf("Set parameter '%s' to '%s'\n", args[2], strings.Join(params, ", ")) + opts.Options[args[2]] = fp[args[2]] + case "system": + if len(args) < 3 { + usageSet() + continue + } + + multiline = MultilineSystem + + line := strings.Join(args[2:], " ") + line, ok := strings.CutPrefix(line, `"""`) + if !ok { + multiline = MultilineNone + } else { + // only cut suffix if the line is multiline + line, ok = strings.CutSuffix(line, `"""`) + if ok { + multiline = MultilineNone + } + } + + sb.WriteString(line) + if multiline != MultilineNone { + scanner.Prompt.UseAlt = true + continue + } + + opts.System = sb.String() // for display in modelfile + newMessage := api.Message{Role: "system", Content: sb.String()} + // Check if the slice is not empty and the last message is from 'system' + if len(opts.Messages) > 0 && opts.Messages[len(opts.Messages)-1].Role == "system" { + // Replace the last message + opts.Messages[len(opts.Messages)-1] = newMessage + } else { + opts.Messages = append(opts.Messages, newMessage) + } + fmt.Println("Set system message.") + sb.Reset() + continue + default: + fmt.Printf("Unknown command '/set %s'. Type /? 
for help\n", args[1]) + } + } else { + usageSet() + } + case strings.HasPrefix(line, "/show"): + args := strings.Fields(line) + if len(args) > 1 { + client, err := api.ClientFromEnvironment() + if err != nil { + fmt.Println("error: couldn't connect to ollama server") + return err + } + req := &api.ShowRequest{ + Name: opts.Model, + System: opts.System, + Options: opts.Options, + } + resp, err := client.Show(cmd.Context(), req) + if err != nil { + fmt.Println("error: couldn't get model") + return err + } + + switch args[1] { + case "info": + _ = showInfo(resp, false, os.Stderr) + case "license": + if resp.License == "" { + fmt.Println("No license was specified for this model.") + } else { + fmt.Println(resp.License) + } + case "modelfile": + fmt.Println(resp.Modelfile) + case "parameters": + if resp.Parameters == "" { + fmt.Println("No parameters were specified for this model.") + } else { + if len(opts.Options) > 0 { + fmt.Println("User defined parameters:") + for k, v := range opts.Options { + fmt.Printf("%-*s %v\n", 30, k, v) + } + fmt.Println() + } + fmt.Println("Model defined parameters:") + fmt.Println(resp.Parameters) + } + case "system": + switch { + case opts.System != "": + fmt.Println(opts.System + "\n") + case resp.System != "": + fmt.Println(resp.System + "\n") + default: + fmt.Println("No system message was specified for this model.") + } + case "template": + if resp.Template != "" { + fmt.Println(resp.Template) + } else { + fmt.Println("No prompt template was specified for this model.") + } + default: + fmt.Printf("Unknown command '/show %s'. Type /? for help\n", args[1]) + } + } else { + usageShow() + } + case strings.HasPrefix(line, "/help"), strings.HasPrefix(line, "/?"): + args := strings.Fields(line) + if len(args) > 1 { + switch args[1] { + case "set", "/set": + usageSet() + case "show", "/show": + usageShow() + case "shortcut", "shortcuts": + usageShortcuts() + } + } else { + usage() + } + case strings.HasPrefix(line, "/exit"), strings.HasPrefix(line, "/bye"): + return nil + case strings.HasPrefix(line, "/"): + args := strings.Fields(line) + isFile := false + + if opts.MultiModal { + for _, f := range extractFileNames(line) { + if strings.HasPrefix(f, args[0]) { + isFile = true + break + } + } + } + + if !isFile { + fmt.Printf("Unknown command '%s'. Type /? 
for help\n", args[0]) + continue + } + + sb.WriteString(line) + default: + sb.WriteString(line) + } + + if sb.Len() > 0 && multiline == MultilineNone { + newMessage := api.Message{Role: "user", Content: sb.String()} + + if opts.MultiModal { + msg, images, err := extractFileData(sb.String()) + if err != nil { + return err + } + + newMessage.Content = msg + newMessage.Images = images + } + + opts.Messages = append(opts.Messages, newMessage) + + assistant, err := chat(cmd, opts) + if err != nil { + return err + } + if assistant != nil { + opts.Messages = append(opts.Messages, *assistant) + } + + sb.Reset() + } + } +} + +func NewCreateRequest(name string, opts runOptions) *api.CreateRequest { + parentModel := opts.ParentModel + + modelName := model.ParseName(parentModel) + if !modelName.IsValid() { + parentModel = "" + } + + req := &api.CreateRequest{ + Model: name, + From: cmp.Or(parentModel, opts.Model), + } + + if opts.System != "" { + req.System = opts.System + } + + if len(opts.Options) > 0 { + req.Parameters = opts.Options + } + + if len(opts.Messages) > 0 { + req.Messages = opts.Messages + } + + return req +} + +func normalizeFilePath(fp string) string { + return strings.NewReplacer( + "\\ ", " ", // Escaped space + "\\(", "(", // Escaped left parenthesis + "\\)", ")", // Escaped right parenthesis + "\\[", "[", // Escaped left square bracket + "\\]", "]", // Escaped right square bracket + "\\{", "{", // Escaped left curly brace + "\\}", "}", // Escaped right curly brace + "\\$", "$", // Escaped dollar sign + "\\&", "&", // Escaped ampersand + "\\;", ";", // Escaped semicolon + "\\'", "'", // Escaped single quote + "\\\\", "\\", // Escaped backslash + "\\*", "*", // Escaped asterisk + "\\?", "?", // Escaped question mark + "\\~", "~", // Escaped tilde + ).Replace(fp) +} + +func extractFileNames(input string) []string { + // Regex to match file paths starting with optional drive letter, / ./ \ or .\ and include escaped or unescaped spaces (\ or %20) + // and followed by more characters and a file extension + // This will capture non filename strings, but we'll check for file existence to remove mismatches + regexPattern := `(?:[a-zA-Z]:)?(?:\./|/|\\)[\S\\ ]+?\.(?i:jpg|jpeg|png|webp)\b` + re := regexp.MustCompile(regexPattern) + + return re.FindAllString(input, -1) +} + +func extractFileData(input string) (string, []api.ImageData, error) { + filePaths := extractFileNames(input) + var imgs []api.ImageData + + for _, fp := range filePaths { + nfp := normalizeFilePath(fp) + data, err := getImageData(nfp) + if errors.Is(err, os.ErrNotExist) { + continue + } else if err != nil { + fmt.Fprintf(os.Stderr, "Couldn't process image: %q\n", err) + return "", imgs, err + } + fmt.Fprintf(os.Stderr, "Added image '%s'\n", nfp) + input = strings.ReplaceAll(input, "'"+nfp+"'", "") + input = strings.ReplaceAll(input, "'"+fp+"'", "") + input = strings.ReplaceAll(input, fp, "") + imgs = append(imgs, data) + } + return strings.TrimSpace(input), imgs, nil +} + +func getImageData(filePath string) ([]byte, error) { + file, err := os.Open(filePath) + if err != nil { + return nil, err + } + defer file.Close() + + buf := make([]byte, 512) + _, err = file.Read(buf) + if err != nil { + return nil, err + } + + contentType := http.DetectContentType(buf) + allowedTypes := []string{"image/jpeg", "image/jpg", "image/png", "image/webp"} + if !slices.Contains(allowedTypes, contentType) { + return nil, fmt.Errorf("invalid image type: %s", contentType) + } + + info, err := file.Stat() + if err != nil { + return nil, err + } + 
+ // Check if the file size exceeds 100MB + var maxSize int64 = 100 * 1024 * 1024 // 100MB in bytes + if info.Size() > maxSize { + return nil, errors.New("file size exceeds maximum limit (100MB)") + } + + buf = make([]byte, info.Size()) + _, err = file.Seek(0, 0) + if err != nil { + return nil, err + } + + _, err = io.ReadFull(file, buf) + if err != nil { + return nil, err + } + + return buf, nil +} diff --git a/cmd/interactive_test.go b/cmd/interactive_test.go new file mode 100644 index 0000000..809f53f --- /dev/null +++ b/cmd/interactive_test.go @@ -0,0 +1,86 @@ +package cmd + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestExtractFilenames(t *testing.T) { + // Unix style paths + input := ` some preamble + ./relative\ path/one.png inbetween1 ./not a valid two.jpg inbetween2 ./1.svg
+/unescaped space /three.jpeg inbetween3 /valid\ path/dir/four.png "./quoted with spaces/five.JPG
+/unescaped space /six.webp inbetween6 /valid\ path/dir/seven.WEBP` + res := extractFileNames(input) + assert.Len(t, res, 7) + assert.Contains(t, res[0], "one.png") + assert.Contains(t, res[1], "two.jpg") + assert.Contains(t, res[2], "three.jpeg") + assert.Contains(t, res[3], "four.png") + assert.Contains(t, res[4], "five.JPG") + assert.Contains(t, res[5], "six.webp") + assert.Contains(t, res[6], "seven.WEBP") + assert.NotContains(t, res[4], "\"") + assert.NotContains(t, res, "inbetween1") + assert.NotContains(t, res, "./1.svg") + + // Windows style paths + input = ` some preamble + c:/users/jdoe/one.png inbetween1 c:/program files/someplace/two.jpg inbetween2 + /absolute/nospace/three.jpeg inbetween3 /absolute/with space/four.png inbetween4
+./relative\ path/five.JPG inbetween5 "./relative with/spaces/six.png inbetween6
+d:\path with\spaces\seven.JPEG inbetween7 c:\users\jdoe\eight.png inbetween8 + d:\program files\someplace\nine.png inbetween9 "E:\program files\someplace\ten.PNG
+c:/users/jdoe/eleven.webp inbetween11 c:/program files/someplace/twelve.WebP inbetween12
+d:\path with\spaces\thirteen.WEBP some ending
+` + res = extractFileNames(input) + assert.Len(t, res, 13) + assert.NotContains(t, res, "inbetween2") + assert.Contains(t, res[0], "one.png") + assert.Contains(t, res[0], "c:") + assert.Contains(t, res[1], "two.jpg") + assert.Contains(t, res[1], "c:") + assert.Contains(t, res[2], "three.jpeg") + assert.Contains(t, res[3], "four.png") + assert.Contains(t, res[4], "five.JPG") + assert.Contains(t, res[5], "six.png") + assert.Contains(t, res[6], "seven.JPEG") + assert.Contains(t, res[6], "d:") + assert.Contains(t, res[7], "eight.png") + assert.Contains(t, res[7], "c:") + assert.Contains(t, res[8], "nine.png") + assert.Contains(t, res[8], "d:") + assert.Contains(t, res[9], "ten.PNG") + assert.Contains(t, res[9], "E:") + assert.Contains(t, res[10], "eleven.webp") + assert.Contains(t, res[10], "c:") + assert.Contains(t, res[11], "twelve.WebP") + assert.Contains(t, res[11], "c:") + assert.Contains(t, res[12], "thirteen.WEBP") + assert.Contains(t, res[12], "d:") +} + +// Ensure that file paths wrapped in single quotes are removed with the quotes.
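+// For instance (an illustrative prompt, not the fixture used below), an input like
+// "describe '/tmp/example.png' briefly" is expected to come back as
+// "describe  briefly" (a doubled space is left where the quoted path was),
+// while the decoded image bytes are returned separately.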
+func TestExtractFileDataRemovesQuotedFilepath(t *testing.T) { + dir := t.TempDir() + fp := filepath.Join(dir, "img.jpg") + data := make([]byte, 600) + copy(data, []byte{ + 0xff, 0xd8, 0xff, 0xe0, 0x00, 0x10, 'J', 'F', 'I', 'F', + 0x00, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0xff, 0xd9, + }) + if err := os.WriteFile(fp, data, 0o600); err != nil { + t.Fatalf("failed to write test image: %v", err) + } + + input := "before '" + fp + "' after" + cleaned, imgs, err := extractFileData(input) + assert.NoError(t, err) + assert.Len(t, imgs, 1) + assert.Equal(t, "before  after", cleaned) // removing the quoted path leaves two adjacent spaces +} diff --git a/cmd/runner/main.go b/cmd/runner/main.go new file mode 100644 index 0000000..fbfafc7 --- /dev/null +++ b/cmd/runner/main.go @@ -0,0 +1,15 @@ +package main + +import ( + "fmt" + "os" + + "github.com/ollama/ollama/runner" +) + +func main() { + if err := runner.Execute(os.Args[1:]); err != nil { + fmt.Fprintf(os.Stderr, "error: %s\n", err) + os.Exit(1) + } +} diff --git a/cmd/start.go b/cmd/start.go new file mode 100644 index 0000000..0c4eed0 --- /dev/null +++ b/cmd/start.go @@ -0,0 +1,27 @@ +//go:build darwin || windows + +package cmd + +import ( + "context" + "errors" + "time" + + "github.com/ollama/ollama/api" +) + +func waitForServer(ctx context.Context, client *api.Client) error { + // wait for the server to start + timeout := time.After(5 * time.Second) + tick := time.Tick(500 * time.Millisecond) + for { + select { + case <-timeout: + return errors.New("timed out waiting for server to start") + case <-tick: + if err := client.Heartbeat(ctx); err == nil { + return nil // server has started + } + } + } +} diff --git a/cmd/start_darwin.go b/cmd/start_darwin.go new file mode 100644 index 0000000..1a9a1ae --- /dev/null +++ b/cmd/start_darwin.go @@ -0,0 +1,30 @@ +package cmd + +import ( + "context" + "errors" + "os" + "os/exec" + "strings" + + "github.com/ollama/ollama/api" +) + +func startApp(ctx context.Context, client *api.Client) error { + exe, err := os.Executable() + if err != nil { + return err + } + link, err := os.Readlink(exe) + if err != nil { + return err + } + if !strings.Contains(link, "Ollama.app") { + return errors.New("could not find ollama app") + } + path := strings.Split(link, "Ollama.app") + if err := exec.Command("/usr/bin/open", "-a", path[0]+"Ollama.app").Run(); err != nil { + return err + } + return waitForServer(ctx, client) +} diff --git a/cmd/start_default.go b/cmd/start_default.go new file mode 100644 index 0000000..5eabb28 --- /dev/null +++ b/cmd/start_default.go @@ -0,0 +1,14 @@ +//go:build !windows && !darwin + +package cmd + +import ( + "context" + "errors" + + "github.com/ollama/ollama/api" +) + +func startApp(ctx context.Context, client *api.Client) error { + return errors.New("could not connect to ollama server, run 'ollama serve' to start it") +} diff --git a/cmd/start_windows.go b/cmd/start_windows.go new file mode 100644 index 0000000..5bca243 --- /dev/null +++ b/cmd/start_windows.go @@ -0,0 +1,58 @@ +package cmd + +import ( + "context" + "errors" + "fmt" + "os" + "os/exec" + "path/filepath" + "strings" + "syscall" + + "github.com/ollama/ollama/api" +) + +func startApp(ctx context.Context, client *api.Client) error { + // log.Printf("XXX Attempting to find and start ollama app") + AppName := "ollama app.exe" + exe, err := os.Executable() + if err != nil { + return err + } + appExe := filepath.Join(filepath.Dir(exe), AppName) + _, err = os.Stat(appExe) + if errors.Is(err, os.ErrNotExist) { + // Try the standard install location + localAppData := 
os.Getenv("LOCALAPPDATA") + appExe = filepath.Join(localAppData, "Ollama", AppName) + _, err := os.Stat(appExe) + if errors.Is(err, os.ErrNotExist) { + // Finally look in the path + appExe, err = exec.LookPath(AppName) + if err != nil { + return errors.New("could not locate ollama app") + } + } + } + // log.Printf("XXX attempting to start app %s", appExe) + + cmd_path := "c:\\Windows\\system32\\cmd.exe" + cmd := exec.Command(cmd_path, "/c", appExe) + // TODO - these hide flags aren't working - still pops up a command window for some reason + cmd.SysProcAttr = &syscall.SysProcAttr{CreationFlags: 0x08000000, HideWindow: true} + + // TODO this didn't help either... + cmd.Stdin = strings.NewReader("") + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + + if err := cmd.Start(); err != nil { + return fmt.Errorf("unable to start ollama app %w", err) + } + + if cmd.Process != nil { + defer cmd.Process.Release() //nolint:errcheck + } + return waitForServer(ctx, client) +} diff --git a/convert/convert.go b/convert/convert.go new file mode 100644 index 0000000..4a6df66 --- /dev/null +++ b/convert/convert.go @@ -0,0 +1,256 @@ +package convert + +import ( + "cmp" + "encoding/json" + "errors" + "fmt" + "io/fs" + "log/slog" + "os" + "slices" + "strings" + + "github.com/ollama/ollama/fs/ggml" +) + +type ModelParameters struct { + Architectures []string `json:"architectures"` + VocabSize uint32 `json:"vocab_size"` + + TextModel struct { + VocabSize uint32 `json:"vocab_size"` + } `json:"text_config"` +} + +type AdapterParameters struct { + Alpha uint32 `json:"lora_alpha"` + LoraLayers uint32 `json:"lora_layers"` + LoraParameters struct { + Rank uint32 `json:"rank"` + Alpha float32 `json:"alpha"` + Scale float32 `json:"scale"` + } `json:"lora_parameters"` +} + +func (ModelParameters) KV(t *Tokenizer) ggml.KV { + kv := ggml.KV{ + "general.file_type": uint32(1), + "general.quantization_version": uint32(2), + "tokenizer.ggml.pre": t.Pre, + "tokenizer.ggml.model": t.Vocabulary.Model, + "tokenizer.ggml.tokens": t.Vocabulary.Tokens, + "tokenizer.ggml.scores": t.Vocabulary.Scores, + "tokenizer.ggml.token_type": t.Vocabulary.Types, + } + + if len(t.Merges) > 0 { + kv["tokenizer.ggml.merges"] = t.Merges + } + + if t.Template != "" { + kv["tokenizer.chat_template"] = t.Template + } + + for _, sv := range t.SpecialVocabulary { + kv[fmt.Sprintf("tokenizer.ggml.add_%s_token", sv.Key())] = sv.AddToken + kv[fmt.Sprintf("tokenizer.ggml.%s_token_id", sv.Key())] = uint32(sv.ID) + if len(sv.IDs) > 0 { + kv[fmt.Sprintf("tokenizer.ggml.%s_token_ids", sv.Key())] = sv.IDs + } + } + + return kv +} + +func (p AdapterParameters) KV() ggml.KV { + var alpha float32 + if p.LoraParameters.Alpha == 0 { + alpha = float32(p.Alpha) + } else { + alpha = p.LoraParameters.Alpha + } + + kv := ggml.KV{ + "adapter.lora.alpha": alpha, + "adapter.type": "lora", + "general.file_type": uint32(1), + "general.type": "adapter", + "general.version": "v0.2", + } + + return kv +} + +func (ModelParameters) specialTokenTypes() []string { + return []string{ + "bos", "eos", "unk", "sep", "pad", "cls", "mask", + } +} + +type ModelConverter interface { + // KV maps parameters to LLM key-values + KV(*Tokenizer) ggml.KV + // Tensors maps input tensors to LLM tensors. Model specific modifications can be done here. + Tensors([]Tensor) []*ggml.Tensor + // Replacements returns a list of string pairs to replace in tensor names. 
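+ // For example (names are illustrative), pairs such as "model.layers" -> "blk" and
+ // "self_attn.q_proj" -> "attn_q" rewrite a tensor named
+ // "model.layers.0.self_attn.q_proj.weight" to "blk.0.attn_q.weight".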
+ // See [strings.Replacer](https://pkg.go.dev/strings#Replacer) for details + Replacements() []string + + // specialTokenTypes returns any special token types the model uses + specialTokenTypes() []string +} + +type moreParser interface { + parseMore(fs.FS) error +} + +type AdapterConverter interface { + // KV maps parameters to LLM key-values + KV(ggml.KV) ggml.KV + // Tensors maps input tensors to LLM tensors. Adapter specific modifications can be done here. + Tensors([]Tensor) []*ggml.Tensor + // Replacements returns a list of string pairs to replace in tensor names. + // See [strings.Replacer](https://pkg.go.dev/strings#Replacer) for details + Replacements() []string +} + +func ConvertAdapter(fsys fs.FS, f *os.File, baseKV ggml.KV) error { + bts, err := fs.ReadFile(fsys, "adapter_config.json") + if err != nil { + return err + } + + var p AdapterParameters + if err := json.Unmarshal(bts, &p); err != nil { + return err + } + + arch, ok := baseKV["general.architecture"] + if !ok { + return errors.New("architecture not set for the base model") + } + + var conv AdapterConverter + switch arch { + case "llama": + conv = &llamaAdapter{} + case "gemma2": + conv = &gemma2Adapter{} + default: + return errors.New("unsupported architecture") + } + + ts, err := parseTensors(fsys, strings.NewReplacer(conv.Replacements()...)) + if err != nil { + return err + } + + if err := json.Unmarshal(bts, conv); err != nil { + return err + } + + return writeFile(f, conv.KV(baseKV), conv.Tensors(ts)) +} + +// Convert writes an Ollama compatible model to the provided io.WriteSeeker based on configurations +// and files it finds in the input path. +// Supported input model formats include safetensors. +// Supported input tokenizers files include tokenizer.json (preferred) and tokenizer.model. 
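+//
+// A minimal usage sketch (the paths are illustrative and error handling is elided):
+//
+//	fsys := os.DirFS("/path/to/hf-model")
+//	out, err := os.Create("model.gguf")
+//	if err != nil {
+//		// handle error
+//	}
+//	defer out.Close()
+//	if err := ConvertModel(fsys, out); err != nil {
+//		// handle error
+//	}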
+func ConvertModel(fsys fs.FS, f *os.File) error { + bts, err := fs.ReadFile(fsys, "config.json") + if err != nil { + return err + } + + var p ModelParameters + if err := json.Unmarshal(bts, &p); err != nil { + return err + } + + if len(p.Architectures) < 1 { + return errors.New("unknown architecture") + } + + var conv ModelConverter + switch p.Architectures[0] { + case "LlamaForCausalLM": + conv = &llamaModel{} + case "MllamaForConditionalGeneration": + conv = &mllamaModel{} + case "Llama4ForConditionalGeneration": + conv = &llama4Model{} + case "Mistral3ForConditionalGeneration": + conv = &mistral3Model{} + case "MixtralForCausalLM": + conv = &mixtralModel{} + case "GemmaForCausalLM": + conv = &gemmaModel{} + case "Gemma2ForCausalLM": + conv = &gemma2Model{} + case "Gemma3ForCausalLM", "Gemma3ForConditionalGeneration": + conv = &gemma3Model{Architecture: p.Architectures[0]} + case "Phi3ForCausalLM": + conv = &phi3Model{} + case "Qwen2ForCausalLM": + conv = &qwen2Model{} + case "Qwen2_5_VLForConditionalGeneration": + conv = &qwen25VLModel{} + case "BertModel": + conv = &bertModel{} + case "CohereForCausalLM": + conv = &commandrModel{} + default: + return fmt.Errorf("unsupported architecture %q", p.Architectures[0]) + } + + if err := json.Unmarshal(bts, conv); err != nil { + return err + } + + if t, ok := conv.(moreParser); ok { + if err := t.parseMore(fsys); err != nil { + return err + } + } + + t, err := parseTokenizer(fsys, conv.specialTokenTypes()) + if err != nil { + return err + } + + vocabSize := int(cmp.Or(p.VocabSize, p.TextModel.VocabSize)) + + switch { + case vocabSize == 0: + slog.Debug("vocabulary size was not explicitly set by the model", "default size", len(t.Vocabulary.Tokens)) + case vocabSize > len(t.Vocabulary.Tokens): + slog.Debug("vocabulary is smaller than expected, padding with dummy tokens", "expect", vocabSize, "actual", len(t.Vocabulary.Tokens)) + for i := range vocabSize - len(t.Vocabulary.Tokens) { + t.Vocabulary.Tokens = append(t.Vocabulary.Tokens, fmt.Sprintf("[PAD%d]", i)) + t.Vocabulary.Scores = append(t.Vocabulary.Scores, -1) + t.Vocabulary.Types = append(t.Vocabulary.Types, tokenTypeUserDefined) + } + case vocabSize < len(t.Vocabulary.Tokens): + slog.Debug("vocabulary is larger than expected", "want", vocabSize, "got", len(t.Vocabulary.Tokens)) + p.VocabSize = uint32(len(t.Vocabulary.Tokens)) + p.TextModel.VocabSize = uint32(len(t.Vocabulary.Tokens)) + default: + slog.Debug("vocabulary", "size", len(t.Vocabulary.Tokens)) + } + + ts, err := parseTensors(fsys, strings.NewReplacer(conv.Replacements()...)) + if err != nil { + return err + } + + return writeFile(f, conv.KV(t), conv.Tensors(ts)) +} + +func writeFile(f *os.File, kv ggml.KV, ts []*ggml.Tensor) error { + for i := range ts { + ts[i].Shape = slices.Clone(ts[i].Shape) + slices.Reverse(ts[i].Shape) + } + return ggml.WriteGGUF(f, kv, ts) +} diff --git a/convert/convert_bert.go b/convert/convert_bert.go new file mode 100644 index 0000000..a9f4b8a --- /dev/null +++ b/convert/convert_bert.go @@ -0,0 +1,174 @@ +package convert + +import ( + "cmp" + "encoding/json" + "io/fs" + "path/filepath" + "slices" + "strings" + + "github.com/ollama/ollama/fs/ggml" +) + +type bertModel struct { + ModelParameters + NLayers uint32 `json:"n_layers"` + NumHiddenLayers uint32 `json:"num_hidden_layers"` + NLayer uint32 `json:"n_layer"` + MaxPositionEmbeddings uint32 `json:"max_position_embeddings"` + NCtx uint32 `json:"n_ctx"` + HiddenSize uint32 `json:"hidden_size"` + NEmbd uint32 `json:"n_embd"` + IntermediateSize uint32 
`json:"intermediate_size"` + NInner uint32 `json:"n_inner"` + NumAttentionHeads uint32 `json:"num_attention_heads"` + NHead uint32 `json:"n_head"` + NumKeyValueHeads uint32 `json:"num_key_value_heads"` + LayerNormEPS float32 `json:"layer_norm_eps"` + LayerNormEpsilon float32 `json:"layer_norm_epsilon"` + NormEpsilon float32 `json:"norm_epsilon"` + + PoolingType uint32 +} + +var ( + _ ModelConverter = (*bertModel)(nil) + _ moreParser = (*bertModel)(nil) +) + +func (p *bertModel) parseMore(fsys fs.FS) error { + bts, err := fs.ReadFile(fsys, "modules.json") + if err != nil { + return err + } + + var modules []struct { + Type string `json:"type"` + Path string `json:"path"` + } + + if err := json.Unmarshal(bts, &modules); err != nil { + return err + } + + var pooling string + for _, m := range modules { + if m.Type == "sentence_transformers.models.Pooling" { + pooling = m.Path + break + } + } + + if pooling != "" { + bts, err := fs.ReadFile(fsys, filepath.Join(pooling, "config.json")) + if err != nil { + return err + } + + var pc struct { + PoolingModeCLSToken bool `json:"pooling_mode_cls_token"` + PoolingModeMeanTokens bool `json:"pooling_mode_mean_tokens"` + } + + if err := json.Unmarshal(bts, &pc); err != nil { + return err + } + + if pc.PoolingModeMeanTokens { + p.PoolingType = 1 + } else if pc.PoolingModeCLSToken { + p.PoolingType = 2 + } + } + + return nil +} + +func (p *bertModel) KV(t *Tokenizer) ggml.KV { + kv := p.ModelParameters.KV(t) + kv["general.architecture"] = "bert" + kv["bert.attention.causal"] = false + kv["bert.pooling_type"] = p.PoolingType + + kv["bert.block_count"] = cmp.Or(p.NLayers, p.NumHiddenLayers, p.NLayer) + + if contextLength := cmp.Or(p.MaxPositionEmbeddings, p.NCtx); contextLength > 0 { + kv["bert.context_length"] = contextLength + } + + if embeddingLength := cmp.Or(p.HiddenSize, p.NEmbd); embeddingLength > 0 { + kv["bert.embedding_length"] = cmp.Or(p.HiddenSize, p.NEmbd) + } + + if feedForwardLength := cmp.Or(p.IntermediateSize, p.NInner); feedForwardLength > 0 { + kv["bert.feed_forward_length"] = cmp.Or(p.IntermediateSize, p.NInner) + } + + if headCount := cmp.Or(p.NumAttentionHeads, p.NHead); headCount > 0 { + kv["bert.attention.head_count"] = cmp.Or(p.NumAttentionHeads, p.NHead) + } + + if layerNormEpsilon := cmp.Or(p.LayerNormEPS, p.LayerNormEpsilon, p.NormEpsilon); layerNormEpsilon > 0 { + kv["bert.attention.layer_norm_epsilon"] = layerNormEpsilon + } + + kv["tokenizer.ggml.model"] = "bert" + kv["tokenizer.ggml.token_type_count"] = uint32(2) + + // convert to phantom space tokens + for i, e := range t.Tokens { + if strings.HasPrefix(e, "[") && strings.HasSuffix(e, "]") { + // noop + } else if strings.HasPrefix(e, "##") { + t.Tokens[i] = e[2:] + } else { + t.Tokens[i] = "\u2581" + e + } + } + + kv["tokenizer.ggml.tokens"] = t.Tokens + + return kv +} + +func (p *bertModel) Tensors(ts []Tensor) []*ggml.Tensor { + var out []*ggml.Tensor + for _, t := range ts { + if slices.Contains([]string{ + "embeddings.position_ids", + "pooler.dense.weight", + "pooler.dense.bias", + }, t.Name()) { + continue + } + + out = append(out, &ggml.Tensor{ + Name: t.Name(), + Kind: t.Kind(), + Shape: t.Shape(), + WriterTo: t, + }) + } + + return out +} + +func (bertModel) Replacements() []string { + return []string{ + "encoder.layer", "blk", + "encoder.layers", "blk", + "embeddings.word_embeddings", "token_embd", + "embeddings.token_type_embeddings", "token_types", + "embeddings.LayerNorm", "token_embd_norm", + "embeddings.position_embeddings", "position_embd", + 
"attention.self.query", "attn_q", + "attention.self.key", "attn_k", + "attention.self.value", "attn_v", + "attention.output.dense", "attn_output", + "attention.output.LayerNorm", "attn_output_norm", + "intermediate.dense", "ffn_up", + "output.dense", "ffn_down", + "output.LayerNorm", "layer_output_norm", + } +} diff --git a/convert/convert_commandr.go b/convert/convert_commandr.go new file mode 100644 index 0000000..a909515 --- /dev/null +++ b/convert/convert_commandr.go @@ -0,0 +1,76 @@ +package convert + +import ( + "cmp" + + "github.com/ollama/ollama/fs/ggml" +) + +type commandrModel struct { + ModelParameters + MaxPositionEmbeddings uint32 `json:"max_position_embeddings"` + HiddenSize uint32 `json:"hidden_size"` + HiddenLayers uint32 `json:"num_hidden_layers"` + IntermediateSize uint32 `json:"intermediate_size"` + NumAttentionHeads uint32 `json:"num_attention_heads"` + NumKeyValueHeads uint32 `json:"num_key_value_heads"` + LayerNormEPS float32 `json:"layer_norm_eps"` + RopeTheta float32 `json:"rope_theta"` + UseQKNorm bool `json:"use_qk_norm"` + MaxLength uint32 `json:"model_max_length"` + LogitScale float32 `json:"logit_scale"` + NCtx uint32 `json:"n_ctx"` +} + +var _ ModelConverter = (*commandrModel)(nil) + +func (p *commandrModel) KV(t *Tokenizer) ggml.KV { + kv := p.ModelParameters.KV(t) + kv["general.architecture"] = "command-r" + kv["general.name"] = "command-r" + kv["command-r.context_length"] = cmp.Or(p.MaxLength, p.MaxPositionEmbeddings, p.NCtx) + kv["command-r.embedding_length"] = p.HiddenSize + kv["command-r.block_count"] = p.HiddenLayers + kv["command-r.feed_forward_length"] = p.IntermediateSize + kv["command-r.attention.head_count"] = p.NumAttentionHeads + kv["command-r.attention.head_count_kv"] = p.NumKeyValueHeads + kv["command-r.attention.layer_norm_epsilon"] = p.LayerNormEPS + kv["command-r.rope.freq_base"] = p.RopeTheta + kv["command-r.max_position_embeddings"] = cmp.Or(p.MaxLength, p.MaxPositionEmbeddings) + kv["command-r.logit_scale"] = p.LogitScale + kv["command-r.rope.scaling.type"] = "none" + + return kv +} + +func (p *commandrModel) Tensors(ts []Tensor) []*ggml.Tensor { + var out []*ggml.Tensor + for _, t := range ts { + out = append(out, &ggml.Tensor{ + Name: t.Name(), + Kind: t.Kind(), + Shape: t.Shape(), + WriterTo: t, + }) + } + + return out +} + +func (p *commandrModel) Replacements() []string { + return []string{ + "self_attn.q_norm", "attn_q_norm", + "self_attn.k_norm", "attn_k_norm", + "model.layers", "blk", + "input_layernorm", "attn_norm", + "mlp.down_proj", "ffn_down", + "mlp.gate_proj", "ffn_gate", + "mlp.up_proj", "ffn_up", + "self_attn.k_proj", "attn_k", + "self_attn.o_proj", "attn_output", + "self_attn.q_proj", "attn_q", + "self_attn.v_proj", "attn_v", + "model.norm", "output_norm", + "model.embed_tokens", "token_embd", + } +} diff --git a/convert/convert_gemma.go b/convert/convert_gemma.go new file mode 100644 index 0000000..26698d6 --- /dev/null +++ b/convert/convert_gemma.go @@ -0,0 +1,100 @@ +package convert + +import ( + "strings" + + "github.com/pdevine/tensor" + "github.com/pdevine/tensor/native" + + "github.com/ollama/ollama/fs/ggml" +) + +type gemmaModel struct { + ModelParameters + MaxPositionEmbeddings uint32 `json:"max_position_embeddings"` + HiddenSize uint32 `json:"hidden_size"` + HiddenLayers uint32 `json:"num_hidden_layers"` + IntermediateSize uint32 `json:"intermediate_size"` + NumAttentionHeads uint32 `json:"num_attention_heads"` + NumKeyValueHeads uint32 `json:"num_key_value_heads"` + RMSNormEPS float32 `json:"rms_norm_eps"` + 
HeadDim uint32 `json:"head_dim"` +} + +var _ ModelConverter = (*gemmaModel)(nil) + +func (p *gemmaModel) KV(t *Tokenizer) ggml.KV { + kv := p.ModelParameters.KV(t) + kv["general.architecture"] = "gemma" + kv["gemma.context_length"] = p.MaxPositionEmbeddings + kv["gemma.embedding_length"] = p.HiddenSize + kv["gemma.block_count"] = p.HiddenLayers + kv["gemma.feed_forward_length"] = p.IntermediateSize + kv["gemma.attention.head_count"] = p.NumAttentionHeads + kv["gemma.attention.head_count_kv"] = p.NumKeyValueHeads + kv["gemma.attention.layer_norm_rms_epsilon"] = p.RMSNormEPS + kv["gemma.attention.key_length"] = p.HeadDim + kv["gemma.attention.value_length"] = p.HeadDim + kv["tokenizer.ggml.eot_token_id"] = uint32(107) + kv["tokenizer.ggml.middle_token_id"] = uint32(68) + kv["tokenizer.ggml.prefix_token_id"] = uint32(67) + kv["tokenizer.ggml.suffix_token_id"] = uint32(69) + return kv +} + +func (p *gemmaModel) Tensors(ts []Tensor) []*ggml.Tensor { + var out []*ggml.Tensor + for _, t := range ts { + if !strings.HasPrefix(t.Name(), "v.") && strings.HasSuffix(t.Name(), "_norm.weight") { + t.SetRepacker(p.addOne) + } + + out = append(out, &ggml.Tensor{ + Name: t.Name(), + Kind: t.Kind(), + Shape: t.Shape(), + WriterTo: t, + }) + } + + return out +} + +func (p *gemmaModel) Replacements() []string { + return []string{ + "model.embed_tokens", "token_embd", + "model.norm", "output_norm", + "model.layers", "blk", + "input_layernorm", "attn_norm", + "self_attn.q_proj", "attn_q", + "self_attn.k_proj", "attn_k", + "self_attn.v_proj", "attn_v", + "self_attn.o_proj", "attn_output", + "mlp.gate_proj", "ffn_gate", + "mlp.down_proj", "ffn_down", + "mlp.up_proj", "ffn_up", + "post_attention_layernorm", "ffn_norm", + } +} + +func (*gemmaModel) addOne(_ string, data []float32, shape []uint64) ([]float32, error) { + n := tensor.New(tensor.WithShape(int(shape[0])), tensor.WithBacking(data)) + ones := tensor.Ones(tensor.Float32, int(shape[0])) + + n, err := n.Add(ones) + if err != nil { + return nil, err + } + + ts, err := native.SelectF32(n, 0) + if err != nil { + return nil, err + } + + var f32s []float32 + for _, t := range ts { + f32s = append(f32s, t...) 
+ } + + return f32s, nil +} diff --git a/convert/convert_gemma2.go b/convert/convert_gemma2.go new file mode 100644 index 0000000..4917e42 --- /dev/null +++ b/convert/convert_gemma2.go @@ -0,0 +1,51 @@ +package convert + +import "github.com/ollama/ollama/fs/ggml" + +type gemma2Model struct { + gemmaModel + SlidingWindow uint32 `json:"sliding_window"` + AttentionLogitSoftcap float32 `json:"attn_logit_softcapping"` + FinalLogitSoftcap float32 `json:"final_logit_softcapping"` +} + +func (p *gemma2Model) KV(t *Tokenizer) ggml.KV { + kv := p.ModelParameters.KV(t) + kv["general.architecture"] = "gemma2" + kv["gemma2.context_length"] = p.MaxPositionEmbeddings + kv["gemma2.embedding_length"] = p.HiddenSize + kv["gemma2.block_count"] = p.HiddenLayers + kv["gemma2.feed_forward_length"] = p.IntermediateSize + kv["gemma2.attention.head_count"] = p.NumAttentionHeads + kv["gemma2.attention.head_count_kv"] = p.NumKeyValueHeads + kv["gemma2.attention.layer_norm_rms_epsilon"] = p.RMSNormEPS + kv["gemma2.attention.key_length"] = p.HeadDim + kv["gemma2.attention.value_length"] = p.HeadDim + kv["gemma2.attention.sliding_window"] = p.SlidingWindow + kv["gemma2.attn_logit_softcapping"] = p.AttentionLogitSoftcap + kv["gemma2.final_logit_softcapping"] = p.FinalLogitSoftcap + kv["tokenizer.ggml.eot_token_id"] = uint32(107) + kv["tokenizer.ggml.middle_token_id"] = uint32(68) + kv["tokenizer.ggml.prefix_token_id"] = uint32(67) + kv["tokenizer.ggml.suffix_token_id"] = uint32(69) + return kv +} + +func (p *gemma2Model) Replacements() []string { + return []string{ + "model.embed_tokens", "token_embd", + "model.norm", "output_norm", + "model.layers", "blk", + "input_layernorm", "attn_norm", + "self_attn.q_proj", "attn_q", + "self_attn.k_proj", "attn_k", + "self_attn.v_proj", "attn_v", + "self_attn.o_proj", "attn_output", + "mlp.gate_proj", "ffn_gate", + "mlp.down_proj", "ffn_down", + "mlp.up_proj", "ffn_up", + "post_attention_layernorm", "post_attention_norm", + "pre_feedforward_layernorm", "ffn_norm", + "post_feedforward_layernorm", "post_ffw_norm", + } +} diff --git a/convert/convert_gemma2_adapter.go b/convert/convert_gemma2_adapter.go new file mode 100644 index 0000000..6299cd9 --- /dev/null +++ b/convert/convert_gemma2_adapter.go @@ -0,0 +1,91 @@ +package convert + +import ( + "strings" + + "github.com/pdevine/tensor" + "github.com/pdevine/tensor/native" + + "github.com/ollama/ollama/fs/ggml" +) + +type gemma2Adapter struct { + AdapterParameters +} + +var _ AdapterConverter = (*gemma2Adapter)(nil) + +func (p *gemma2Adapter) KV(baseKV ggml.KV) ggml.KV { + kv := p.AdapterParameters.KV() + kv["general.architecture"] = "gemma2" + return kv +} + +func (p *gemma2Adapter) Tensors(ts []Tensor) []*ggml.Tensor { + var out []*ggml.Tensor + for _, t := range ts { + shape := t.Shape() + if (strings.HasSuffix(t.Name(), "weight.lora_a") && shape[0] > shape[1]) || + (strings.HasSuffix(t.Name(), "weight.lora_b") && shape[0] < shape[1]) { + shape[0], shape[1] = shape[1], shape[0] + t.SetRepacker(p.repack) + } + + out = append(out, &ggml.Tensor{ + Name: t.Name(), + Kind: t.Kind(), + Shape: t.Shape(), + WriterTo: t, + }) + } + + return out +} + +func (p *gemma2Adapter) Replacements() []string { + return []string{ + "base_model.model.", "", + "model.layers", "blk", + "self_attn.q_proj", "attn_q", + "self_attn.k_proj", "attn_k", + "self_attn.v_proj", "attn_v", + "self_attn.o_proj", "attn_output", + "mlp.gate_proj", "ffn_gate", + "mlp.down_proj", "ffn_down", + "mlp.up_proj", "ffn_up", + "lora_A.weight", "weight.lora_a", + 
"lora_B.weight", "weight.lora_b", + "lora_a", "weight.lora_a", + "lora_b", "weight.lora_b", + } +} + +func (p *gemma2Adapter) repack(name string, data []float32, shape []uint64) ([]float32, error) { + dims := []int{int(shape[1]), int(shape[0])} + + n := tensor.New(tensor.WithShape(dims...), tensor.WithBacking(data)) + + if err := n.T(1, 0); err != nil { + return nil, err + } + + if err := n.Reshape(dims...); err != nil { + return nil, err + } + + if err := n.Transpose(); err != nil { + return nil, err + } + + ts, err := native.SelectF32(n, 1) + if err != nil { + return nil, err + } + + var f32s []float32 + for _, t := range ts { + f32s = append(f32s, t...) + } + + return f32s, nil +} diff --git a/convert/convert_gemma3.go b/convert/convert_gemma3.go new file mode 100644 index 0000000..27b99f5 --- /dev/null +++ b/convert/convert_gemma3.go @@ -0,0 +1,142 @@ +package convert + +import ( + "cmp" + + "github.com/ollama/ollama/fs/ggml" +) + +type gemma3Model struct { + gemmaModel + Architecture string + TextModel struct { + HeadDim uint32 `json:"head_dim"` + HiddenSize uint32 `json:"hidden_size"` + HiddenLayers uint32 `json:"num_hidden_layers"` + IntermediateSize uint32 `json:"intermediate_size"` + SlidingWindow uint32 `json:"sliding_window"` + } `json:"text_config"` + VisionModel struct { + NumAttentionHeads uint32 `json:"num_attention_heads"` // attention.head_count 16 + LayerNormEpsilon float32 `json:"layer_norm_eps"` // attention.layer_norm_epsilon 1e-05 + NumHiddenLayers uint32 `json:"num_hidden_layers"` // block_count 32 + HiddenSize uint32 `json:"hidden_size"` // embedding_length 1280 + IntermediateSize uint32 `json:"intermediate_size"` // feed_forward_length 5120 + ImageSize uint32 `json:"image_size"` // image_size 560 + NumChannels uint32 `json:"num_channels"` // num_channels 3 + PatchSize uint32 `json:"patch_size"` // patch_size 14 + } `json:"vision_config"` + MaxPositionEmbeddings uint32 `json:"max_position_embeddings"` + NumAttentionHeads uint32 `json:"num_attention_heads"` + NumKeyValueHeads uint32 `json:"num_key_value_heads"` + RMSNormEPS float32 `json:"rms_norm_eps"` + HeadDim uint32 `json:"head_dim"` + FinalLogitSoftcap float32 `json:"final_logit_softcapping"` + RopeLocalTheta float32 `json:"rope_local_base_freq"` + RopeGlobalTheta float32 `json:"rope_global_base_freq"` + SlidingWindow uint32 `json:"sliding_window"` + MultiModalTokensPerImage uint32 `json:"mm_tokens_per_image"` +} + +const ( + gemma4BLayerCount = 34 + gemma12BLayerCount = 48 + gemma27BLayerCount = 62 +) + +func (p *gemma3Model) KV(t *Tokenizer) ggml.KV { + kv := p.ModelParameters.KV(t) + kv["general.architecture"] = "gemma3" + + numBlocks := cmp.Or(p.HiddenLayers, p.TextModel.HiddenLayers) + kv["gemma3.block_count"] = numBlocks + + var ( + numHeads uint32 + numKVHeads uint32 + ) + + switch numBlocks { + case gemma4BLayerCount: + numHeads = 8 + numKVHeads = 4 + case gemma12BLayerCount: + numHeads = 16 + numKVHeads = 8 + case gemma27BLayerCount: + numHeads = 32 + numKVHeads = 16 + default: + numHeads = p.NumAttentionHeads + numKVHeads = p.NumKeyValueHeads + } + + kv["gemma3.attention.head_count"] = numHeads + kv["gemma3.attention.head_count_kv"] = numKVHeads + + switch p.Architecture { + case "Gemma3ForCausalLM": + kv["gemma3.context_length"] = p.MaxPositionEmbeddings + kv["gemma3.attention.layer_norm_rms_epsilon"] = p.RMSNormEPS + kv["gemma3.attention.key_length"] = p.HeadDim + kv["gemma3.attention.value_length"] = p.HeadDim + kv["gemma3.attention.sliding_window"] = p.SlidingWindow + 
kv["gemma3.final_logit_softcapping"] = cmp.Or(p.FinalLogitSoftcap, 30) + kv["gemma3.rope.local.freq_base"] = cmp.Or(p.RopeLocalTheta, 10000.0) + kv["gemma3.rope.global.freq_base"] = cmp.Or(p.RopeGlobalTheta, 1000000.0) + kv["gemma3.embedding_length"] = p.HiddenSize + kv["gemma3.feed_forward_length"] = p.IntermediateSize + default: + kv["gemma3.context_length"] = cmp.Or(p.MaxPositionEmbeddings, 131072) + kv["gemma3.embedding_length"] = p.TextModel.HiddenSize + kv["gemma3.feed_forward_length"] = p.TextModel.IntermediateSize + kv["gemma3.attention.sliding_window"] = p.TextModel.SlidingWindow + kv["gemma3.vision.block_count"] = p.VisionModel.NumHiddenLayers + kv["gemma3.vision.embedding_length"] = p.VisionModel.HiddenSize + kv["gemma3.vision.feed_forward_length"] = p.VisionModel.IntermediateSize + kv["gemma3.vision.image_size"] = p.VisionModel.ImageSize + kv["gemma3.vision.patch_size"] = p.VisionModel.PatchSize + kv["gemma3.vision.num_channels"] = cmp.Or(p.VisionModel.NumChannels, 3) + kv["gemma3.vision.attention.head_count"] = p.VisionModel.NumAttentionHeads + kv["gemma3.vision.attention.layer_norm_epsilon"] = cmp.Or(p.VisionModel.LayerNormEpsilon, 1e-6) + kv["gemma3.attention.key_length"] = cmp.Or(p.TextModel.HeadDim, 256) + kv["gemma3.attention.value_length"] = cmp.Or(p.TextModel.HeadDim, 256) + } + + if p.MultiModalTokensPerImage > 0 { + kv["gemma3.mm.tokens_per_image"] = p.MultiModalTokensPerImage + } + + return kv +} + +func (p *gemma3Model) Replacements() []string { + return []string{ + "lm_head", "output", + "model.embed_tokens", "token_embd", + "model.norm", "output_norm", + "vision_tower.vision_model.embeddings", "v", + "vision_tower.vision_model", "v", + "vision_model.vision_model.embeddings", "v", + "vision_model.vision_model", "v", + "language_model.", "", + "model.layers", "blk", + "encoder.layers", "blk", + "input_layernorm", "attn_norm", + "self_attn.q_proj", "attn_q", + "self_attn.q_norm", "attn_q_norm", + "self_attn.k_proj", "attn_k", + "self_attn.k_norm", "attn_k_norm", + "self_attn.v_proj", "attn_v", + "self_attn.o_proj", "attn_output", + "self_attn.out_proj", "attn_output", + "mlp.gate_proj", "ffn_gate", + "mlp.down_proj", "ffn_down", + "mlp.up_proj", "ffn_up", + "post_attention_layernorm", "post_attention_norm", + "pre_feedforward_layernorm", "ffn_norm", + "post_feedforward_layernorm", "post_ffw_norm", + "input_projection_weight", "input_projection.weight", + "multi_modal_projector", "mm", + } +} diff --git a/convert/convert_llama.go b/convert/convert_llama.go new file mode 100644 index 0000000..4396974 --- /dev/null +++ b/convert/convert_llama.go @@ -0,0 +1,221 @@ +package convert + +import ( + "cmp" + "fmt" + "math" + "strings" + + "github.com/pdevine/tensor" + "github.com/pdevine/tensor/native" + + "github.com/ollama/ollama/fs/ggml" +) + +type llamaModel struct { + ModelParameters + NLayers uint32 `json:"n_layers"` + NumHiddenLayers uint32 `json:"num_hidden_layers"` + NLayer uint32 `json:"n_layer"` + MaxPositionEmbeddings uint32 `json:"max_position_embeddings"` + NCtx uint32 `json:"n_ctx"` + HiddenSize uint32 `json:"hidden_size"` + NEmbd uint32 `json:"n_embd"` + IntermediateSize uint32 `json:"intermediate_size"` + NInner uint32 `json:"n_inner"` + NumAttentionHeads uint32 `json:"num_attention_heads"` + NHead uint32 `json:"n_head"` + NumKeyValueHeads uint32 `json:"num_key_value_heads"` + RopeTheta float32 `json:"rope_theta"` + RopeScaling struct { + Type string `json:"type"` + RopeType string `json:"rope_type"` + Factor float32 `json:"factor"` + LowFrequencyFactor 
float32 `json:"low_freq_factor"` + HighFrequencyFactor float32 `json:"high_freq_factor"` + OriginalMaxPositionEmbeddings uint32 `json:"original_max_position_embeddings"` + + factors ropeFactor + } `json:"rope_scaling"` + RMSNormEPS float32 `json:"rms_norm_eps"` + LayerNormEPS float32 `json:"layer_norm_eps"` + LayerNormEpsilon float32 `json:"layer_norm_epsilon"` + NormEpsilon float32 `json:"norm_epsilon"` + HeadDim uint32 `json:"head_dim"` + + skipRepack bool +} + +var _ ModelConverter = (*llamaModel)(nil) + +func (p *llamaModel) KV(t *Tokenizer) ggml.KV { + kv := p.ModelParameters.KV(t) + kv["general.architecture"] = "llama" + kv["llama.vocab_size"] = p.VocabSize + + kv["llama.block_count"] = cmp.Or(p.NLayers, p.NumHiddenLayers, p.NLayer) + + if contextLength := cmp.Or(p.MaxPositionEmbeddings, p.NCtx); contextLength > 0 { + kv["llama.context_length"] = contextLength + } + + if embeddingLength := cmp.Or(p.HiddenSize, p.NEmbd); embeddingLength > 0 { + kv["llama.embedding_length"] = cmp.Or(p.HiddenSize, p.NEmbd) + } + + if feedForwardLength := cmp.Or(p.IntermediateSize, p.NInner); feedForwardLength > 0 { + kv["llama.feed_forward_length"] = cmp.Or(p.IntermediateSize, p.NInner) + } + + if headCount := cmp.Or(p.NumAttentionHeads, p.NHead); headCount > 0 { + kv["llama.attention.head_count"] = cmp.Or(p.NumAttentionHeads, p.NHead) + kv["llama.rope.dimension_count"] = p.HiddenSize / headCount + } + + if p.HeadDim > 0 { + kv["llama.attention.head_dim"] = p.HeadDim + } + + if p.RopeTheta > 0 { + kv["llama.rope.freq_base"] = p.RopeTheta + } + + if p.RopeScaling.Type == "linear" { + kv["llama.rope.scaling.type"] = p.RopeScaling.Type + kv["llama.rope.scaling.factor"] = p.RopeScaling.Factor + } else if p.RopeScaling.RopeType == "llama3" { + dim := p.HiddenSize / p.NumAttentionHeads + for i := uint32(0); i < dim; i += 2 { + factor := cmp.Or(p.RopeScaling.Factor, 8.0) + factorLow := cmp.Or(p.RopeScaling.LowFrequencyFactor, 1.0) + factorHigh := cmp.Or(p.RopeScaling.HighFrequencyFactor, 4.0) + + original := cmp.Or(p.RopeScaling.OriginalMaxPositionEmbeddings, 8192) + lambdaLow := float32(original) / factorLow + lambdaHigh := float32(original) / factorHigh + + lambda := 2 * math.Pi * math.Pow(float64(p.RopeTheta), float64(i)/float64(dim)) + if lambda < float64(lambdaHigh) { + p.RopeScaling.factors = append(p.RopeScaling.factors, 1.0) + } else if lambda > float64(lambdaLow) { + p.RopeScaling.factors = append(p.RopeScaling.factors, factor) + } else { + smooth := (float32(original)/float32(lambda) - factorLow) / (factorHigh - factorLow) + p.RopeScaling.factors = append(p.RopeScaling.factors, 1.0/((1-smooth)/factor+smooth)) + } + } + } + + if p.NumKeyValueHeads > 0 { + kv["llama.attention.head_count_kv"] = p.NumKeyValueHeads + } + + if p.RMSNormEPS > 0 { + kv["llama.attention.layer_norm_rms_epsilon"] = p.RMSNormEPS + } + + if layerNormEpsilon := cmp.Or(p.LayerNormEPS, p.LayerNormEpsilon, p.NormEpsilon); layerNormEpsilon > 0 { + kv["llama.attention.layer_norm_epsilon"] = layerNormEpsilon + } + + if p.HeadDim > 0 { + kv["llama.attention.key_length"] = p.HeadDim + kv["llama.attention.value_length"] = p.HeadDim + } + + return kv +} + +func (p *llamaModel) Tensors(ts []Tensor) []*ggml.Tensor { + var out []*ggml.Tensor + + if p.RopeScaling.factors != nil { + out = append(out, &ggml.Tensor{ + Name: "rope_freqs.weight", + Kind: 0, + Shape: []uint64{uint64(len(p.RopeScaling.factors))}, + WriterTo: p.RopeScaling.factors, + }) + } + + for _, t := range ts { + if strings.HasSuffix(t.Name(), "attn_q.weight") || 
strings.HasSuffix(t.Name(), "attn_k.weight") || + strings.HasSuffix(t.Name(), "attn_q_proj.weight") || strings.HasSuffix(t.Name(), "attn_k_proj.weight") { + if !p.skipRepack { + t.SetRepacker(p.repack) + } + } + + out = append(out, &ggml.Tensor{ + Name: t.Name(), + Kind: t.Kind(), + Shape: t.Shape(), + WriterTo: t, + }) + } + + return out +} + +func (p *llamaModel) Replacements() []string { + return []string{ + "lm_head", "output", + "model.embed_tokens", "token_embd", + "model.norm", "output_norm", + "model.layers", "blk", + "input_layernorm", "attn_norm", + "self_attn.q_proj", "attn_q", + "self_attn.k_proj", "attn_k", + "self_attn.v_proj", "attn_v", + "self_attn.o_proj", "attn_output", + "mlp.gate_proj", "ffn_gate", + "mlp.down_proj", "ffn_down", + "mlp.up_proj", "ffn_up", + "post_attention_layernorm", "ffn_norm", + } +} + +func (p *llamaModel) repack(name string, data []float32, shape []uint64) ([]float32, error) { + var dims []int + for _, dim := range shape { + dims = append(dims, int(dim)) + } + + var heads uint32 + if strings.HasSuffix(name, "attn_q.weight") || strings.HasSuffix(name, "attn_q_proj.weight") { + heads = p.NumAttentionHeads + } else if strings.HasSuffix(name, "attn_k.weight") || strings.HasSuffix(name, "attn_k_proj.weight") { + heads = cmp.Or(p.NumKeyValueHeads, p.NumAttentionHeads) + } else { + return nil, fmt.Errorf("unknown tensor for repack: %s", name) + } + + n := tensor.New(tensor.WithShape(dims...), tensor.WithBacking(data)) + if err := n.Reshape(append([]int{int(heads), 2, dims[0] / int(heads) / 2}, dims[1:]...)...); err != nil { + return nil, err + } + + if err := n.T(0, 2, 1, 3); err != nil { + return nil, err + } + + if err := n.Reshape(dims...); err != nil { + return nil, err + } + + if err := n.Transpose(); err != nil { + return nil, err + } + + ts, err := native.SelectF32(n, 1) + if err != nil { + return nil, err + } + + var f32s []float32 + for _, t := range ts { + f32s = append(f32s, t...) + } + + return f32s, nil +} diff --git a/convert/convert_llama4.go b/convert/convert_llama4.go new file mode 100644 index 0000000..3e37923 --- /dev/null +++ b/convert/convert_llama4.go @@ -0,0 +1,169 @@ +package convert + +import ( + "slices" + "strings" + + "github.com/pdevine/tensor" + "github.com/pdevine/tensor/native" + + "github.com/ollama/ollama/fs/ggml" +) + +type llama4Model struct { + ModelParameters + TextModel struct { + llamaModel + NumExpertsPerToken uint32 `json:"num_experts_per_tok"` + NumLocalExperts uint32 `json:"num_local_experts"` + InterleaveMOELayerStep uint32 `json:"interleave_moe_layer_step"` + UseQKNorm bool `json:"use_qk_norm"` + IntermediateSizeMLP uint32 `json:"intermediate_size_mlp"` + AttentionChunkSize uint32 `json:"attention_chunk_size"` + } `json:"text_config"` + VisionModel struct { + NumHiddenLayers uint32 `json:"num_hidden_layers"` + HiddenSize uint32 `json:"hidden_size"` + IntermediateSize uint32 `json:"intermediate_size"` + NumAttentionHeads uint32 `json:"num_attention_heads"` + ImageSize uint32 `json:"image_size"` + PatchSize uint32 `json:"patch_size"` + RopeTheta float32 `json:"rope_theta"` + NormEpsilon float32 `json:"norm_eps"` + PixelShuffleRatio float32 `json:"pixel_shuffle_ratio"` + } `json:"vision_config"` +} + +// KV implements ModelConverter. 
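+// Text-configuration keys produced by the embedded llamaModel are re-prefixed here,
+// e.g. "llama.block_count" becomes "llama4.block_count" (the key shown is just an
+// example); the MoE and vision keys are then added on top.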
+func (p *llama4Model) KV(t *Tokenizer) ggml.KV { + kv := p.ModelParameters.KV(t) + kv["general.architecture"] = "llama4" + + for k, v := range p.TextModel.KV(t) { + if strings.HasPrefix(k, "llama.") { + kv[strings.ReplaceAll(k, "llama.", "llama4.")] = v + } + } + + kv["llama4.feed_forward_length"] = p.TextModel.IntermediateSizeMLP + kv["llama4.expert_feed_forward_length"] = p.TextModel.IntermediateSize + + kv["llama4.expert_count"] = p.TextModel.NumLocalExperts + kv["llama4.expert_used_count"] = p.TextModel.NumExpertsPerToken + kv["llama4.interleave_moe_layer_step"] = p.TextModel.InterleaveMOELayerStep + kv["llama4.use_qk_norm"] = p.TextModel.UseQKNorm + kv["llama4.attention.chunk_size"] = p.TextModel.AttentionChunkSize + + kv["llama4.vision.block_count"] = p.VisionModel.NumHiddenLayers + kv["llama4.vision.embedding_length"] = p.VisionModel.HiddenSize + kv["llama4.vision.feed_forward_length"] = p.VisionModel.IntermediateSize + kv["llama4.vision.attention.head_count"] = p.VisionModel.NumAttentionHeads + kv["llama4.vision.image_size"] = p.VisionModel.ImageSize + kv["llama4.vision.patch_size"] = p.VisionModel.PatchSize + kv["llama4.vision.rope.freq_base"] = p.VisionModel.RopeTheta + kv["llama4.vision.layer_norm_epsilon"] = p.VisionModel.NormEpsilon + kv["llama4.vision.pixel_shuffle_ratio"] = p.VisionModel.PixelShuffleRatio + return kv +} + +// Replacements implements ModelConverter. +func (p *llama4Model) Replacements() []string { + return append( + p.TextModel.Replacements(), + "language_model.", "", + "vision_model", "v", + "multi_modal_projector", "mm", + "feed_forward.down_proj", "ffn_down", + "feed_forward.up_proj", "ffn_up", + "feed_forward.gate_proj", "ffn_gate", + "feed_forward.", "ffn_", + "shared_expert.down_proj", "down_shexp", + "shared_expert.gate_proj", "gate_shexp", + "shared_expert.up_proj", "up_shexp", + "experts.down_proj", "down_exps.weight", + "experts.gate_up_proj", "gate_up_exps.weight", + "router", "gate_inp", + "patch_embedding.linear", "patch_embedding", + ) +} + +// Tensors implements ModelConverter. 
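+// Vision ("v.") and projector ("mm.") tensors pass through unchanged, fused expert
+// tensors are split into separate gate and up tensors, and the remaining text
+// tensors are handled by the embedded llamaModel with repacking skipped.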
+func (p *llama4Model) Tensors(ts []Tensor) []*ggml.Tensor { + var out []*ggml.Tensor + + var textTensors []Tensor + for _, t := range ts { + if strings.HasPrefix(t.Name(), "v.") || strings.HasPrefix(t.Name(), "mm.") { + out = append(out, &ggml.Tensor{ + Name: t.Name(), + Kind: t.Kind(), + Shape: t.Shape(), + WriterTo: t, + }) + } else if strings.Contains(t.Name(), "ffn_gate_up_exps") { + // gate and up projectors are fused + // dims[1], dims[2] must be swapped + // [experts, hidden_size, intermediate_size * 2] --> [experts, intermediate_size, hidden_size] + halfDim := int(t.Shape()[2]) / 2 + + newShape := slices.Clone(t.Shape()) + newShape[1], newShape[2] = newShape[2]/2, newShape[1] + for i, name := range []string{"ffn_gate_exps", "ffn_up_exps"} { + // clone tensor since we need separate repackers + tt := t.Clone() + tt.SetRepacker(p.repack(nil, nil, tensor.S(i*halfDim, (i+1)*halfDim))) + out = append(out, &ggml.Tensor{ + Name: strings.ReplaceAll(tt.Name(), "ffn_gate_up_exps", name), + Kind: tt.Kind(), + Shape: newShape, + WriterTo: tt, + }) + } + } else if strings.Contains(t.Name(), "ffn_down_exps") { + // dims[1], dims[2] must be swapped + // [experts, intermediate_size, hidden_size] --> [experts, hidden_size, intermediate_size] + t.SetRepacker(p.repack()) + newShape := slices.Clone(t.Shape()) + newShape[1], newShape[2] = newShape[2], newShape[1] + out = append(out, &ggml.Tensor{ + Name: t.Name(), + Kind: t.Kind(), + Shape: newShape, + WriterTo: t, + }) + } else { + textTensors = append(textTensors, t) + } + } + + p.TextModel.skipRepack = true + out = append(out, p.TextModel.Tensors(textTensors)...) + return out +} + +func (p *llama4Model) repack(slice ...tensor.Slice) Repacker { + return func(name string, data []float32, shape []uint64) ([]float32, error) { + dims := make([]int, len(shape)) + for i, dim := range shape { + dims[i] = int(dim) + } + + var t tensor.Tensor = tensor.New(tensor.WithShape(dims...), tensor.WithBacking(data)) + t, err := t.Slice(slice...) 
+ if err != nil { + return nil, err + } + + if err := t.T(0, 2, 1); err != nil { + return nil, err + } + + t = tensor.Materialize(t) + // flatten tensor so it can be return as a vector + if err := t.Reshape(t.Shape().TotalSize()); err != nil { + return nil, err + } + + return native.VectorF32(t.(*tensor.Dense)) + } +} diff --git a/convert/convert_llama_adapter.go b/convert/convert_llama_adapter.go new file mode 100644 index 0000000..4cc4511 --- /dev/null +++ b/convert/convert_llama_adapter.go @@ -0,0 +1,169 @@ +package convert + +import ( + "cmp" + "strings" + + "github.com/pdevine/tensor" + "github.com/pdevine/tensor/native" + + "github.com/ollama/ollama/fs/ggml" +) + +type llamaAdapter struct { + AdapterParameters + NumAttentionHeads uint32 `json:"num_attention_heads"` + NumKeyValueHeads uint32 `json:"num_key_value_heads"` +} + +var _ AdapterConverter = (*llamaAdapter)(nil) + +func (p *llamaAdapter) KV(baseKV ggml.KV) ggml.KV { + kv := p.AdapterParameters.KV() + kv["general.architecture"] = "llama" + kv["llama.attention.head_count"] = baseKV["llama.attention.head_count"] + kv["llama.attention.head_count_kv"] = baseKV["llama.attention.head_count_kv"] + + p.NumAttentionHeads = baseKV["llama.attention.head_count"].(uint32) + + return kv +} + +func (p *llamaAdapter) Tensors(ts []Tensor) []*ggml.Tensor { + var out []*ggml.Tensor + for _, t := range ts { + shape := t.Shape() + if (strings.HasSuffix(t.Name(), "weight.lora_a") && shape[0] > shape[1]) || + (strings.HasSuffix(t.Name(), "weight.lora_b") && shape[0] < shape[1]) { + shape[0], shape[1] = shape[1], shape[0] + t.SetRepacker(p.repackAndTranspose) + } else { + t.SetRepacker(p.repack) + } + + out = append(out, &ggml.Tensor{ + Name: t.Name(), + Kind: t.Kind(), + Shape: shape, + WriterTo: t, + }) + } + + return out +} + +func (p *llamaAdapter) Replacements() []string { + return []string{ + "base_model.model.", "", + "model.layers", "blk", + "self_attn.q_proj", "attn_q", + "self_attn.k_proj", "attn_k", + "self_attn.v_proj", "attn_v", + "self_attn.o_proj", "attn_output", + "mlp.gate_proj", "ffn_gate", + "mlp.down_proj", "ffn_down", + "mlp.up_proj", "ffn_up", + "lora_A.weight", "weight.lora_a", + "lora_B.weight", "weight.lora_b", + "lora_a", "weight.lora_a", + "lora_b", "weight.lora_b", + } +} + +func (p *llamaAdapter) repack(name string, data []float32, shape []uint64) ([]float32, error) { + dims := []int{int(shape[1]), int(shape[0])} + + var heads uint32 + if strings.HasSuffix(name, "attn_q.weight.lora_a") { + heads = p.NumAttentionHeads + } else if strings.HasSuffix(name, "attn_k.weight.lora_a") { + heads = cmp.Or(p.NumKeyValueHeads, p.NumAttentionHeads) + } else { + return data, nil + } + + n := tensor.New(tensor.WithShape(dims...), tensor.WithBacking(data)) + + if err := n.Reshape(append([]int{int(heads), 2, dims[0] / int(heads) / 2}, dims[1:]...)...); err != nil { + return nil, err + } + + if err := n.T(0, 2, 1, 3); err != nil { + return nil, err + } + + if err := n.Reshape(dims...); err != nil { + return nil, err + } + + if err := n.Transpose(); err != nil { + return nil, err + } + + ts, err := native.SelectF32(n, 1) + if err != nil { + return nil, err + } + + var f32s []float32 + for _, t := range ts { + f32s = append(f32s, t...) 
+ } + + return f32s, nil +} + +func (p *llamaAdapter) repackAndTranspose(name string, data []float32, shape []uint64) ([]float32, error) { + dims := []int{int(shape[1]), int(shape[0])} + + n := tensor.New(tensor.WithShape(dims...), tensor.WithBacking(data)) + + var heads uint32 + if strings.HasSuffix(name, "attn_q.weight.lora_a") { + heads = p.NumAttentionHeads + } else if strings.HasSuffix(name, "attn_k.weight.lora_a") { + heads = cmp.Or(p.NumKeyValueHeads, p.NumAttentionHeads) + } + + if heads > 0 { + if err := n.Reshape(append([]int{int(heads), 2, dims[0] / int(heads) / 2}, dims[1:]...)...); err != nil { + return nil, err + } + + if err := n.T(0, 2, 1, 3); err != nil { + return nil, err + } + + if err := n.Reshape(dims...); err != nil { + return nil, err + } + + if err := n.Transpose(); err != nil { + return nil, err + } + } + + if err := n.T(1, 0); err != nil { + return nil, err + } + + if err := n.Reshape(dims...); err != nil { + return nil, err + } + + if err := n.Transpose(); err != nil { + return nil, err + } + + ts, err := native.SelectF32(n, 1) + if err != nil { + return nil, err + } + + var f32s []float32 + for _, t := range ts { + f32s = append(f32s, t...) + } + + return f32s, nil +} diff --git a/convert/convert_mistral.go b/convert/convert_mistral.go new file mode 100644 index 0000000..a6fd4c4 --- /dev/null +++ b/convert/convert_mistral.go @@ -0,0 +1,190 @@ +package convert + +import ( + "cmp" + "fmt" + "strings" + + "github.com/pdevine/tensor" + "github.com/pdevine/tensor/native" + + "github.com/ollama/ollama/fs/ggml" +) + +type mistral3Model struct { + ModelParameters + ImageTokenIndex uint32 `json:"image_token_index"` + SpatialMergeSize uint32 `json:"spatial_merge_size"` + VisionFeatureLayer int32 `json:"vision_feature_layer"` + TextModel struct { + NumHiddenLayers uint32 `json:"num_hidden_layers"` + MaxPositionEmbeddings uint32 `json:"max_position_embeddings"` + HiddenSize uint32 `json:"hidden_size"` + IntermediateSize uint32 `json:"intermediate_size"` + NumAttentionHeads uint32 `json:"num_attention_heads"` + NumKeyValueHeads uint32 `json:"num_key_value_heads"` + RopeTheta float32 `json:"rope_theta"` + RMSNormEPS float32 `json:"rms_norm_eps"` + HeadDim uint32 `json:"head_dim"` + SlidingWindow *uint32 `json:"sliding_window"` + HiddenAct string `json:"hidden_act"` + VocabSize uint32 `json:"vocab_size"` + } `json:"text_config"` + VisionModel struct { + NumAttentionHeads uint32 `json:"num_attention_heads"` + NumHiddenLayers uint32 `json:"num_hidden_layers"` + HiddenSize uint32 `json:"hidden_size"` + IntermediateSize uint32 `json:"intermediate_size"` + ImageSize uint32 `json:"image_size"` + NumChannels uint32 `json:"num_channels"` + PatchSize uint32 `json:"patch_size"` + HeadDim uint32 `json:"head_dim"` + HiddenAct string `json:"hidden_act"` + RopeTheta float32 `json:"rope_theta"` + } `json:"vision_config"` + MultiModalProjectorBias bool `json:"multimodal_projector_bias"` + ProjectorHiddenAct string `json:"projector_hidden_act"` +} + +func (p *mistral3Model) KV(t *Tokenizer) ggml.KV { + kv := p.ModelParameters.KV(t) + kv["general.architecture"] = "mistral3" + kv["mistral3.vocab_size"] = p.TextModel.VocabSize + + // Text configuration + kv["mistral3.block_count"] = p.TextModel.NumHiddenLayers + kv["mistral3.context_length"] = p.TextModel.MaxPositionEmbeddings + kv["mistral3.embedding_length"] = p.TextModel.HiddenSize + kv["mistral3.feed_forward_length"] = p.TextModel.IntermediateSize + kv["mistral3.attention.head_count"] = p.TextModel.NumAttentionHeads + 
kv["mistral3.attention.head_count_kv"] = p.TextModel.NumKeyValueHeads + kv["mistral3.attention.layer_norm_rms_epsilon"] = p.TextModel.RMSNormEPS + kv["mistral3.attention.key_length"] = p.TextModel.HeadDim + kv["mistral3.attention.value_length"] = p.TextModel.HeadDim + kv["mistral3.rope.dimension_count"] = p.TextModel.HiddenSize / p.TextModel.NumHiddenLayers + kv["mistral3.rope.freq_base"] = p.TextModel.RopeTheta + + // Vision configuration + kv["mistral3.vision.block_count"] = p.VisionModel.NumHiddenLayers + kv["mistral3.vision.embedding_length"] = p.VisionModel.HiddenSize + kv["mistral3.vision.feed_forward_length"] = p.VisionModel.IntermediateSize + kv["mistral3.vision.attention.head_count"] = p.VisionModel.NumAttentionHeads + kv["mistral3.vision.attention.key_length"] = p.VisionModel.HeadDim + kv["mistral3.vision.image_size"] = p.VisionModel.ImageSize + kv["mistral3.vision.patch_size"] = p.VisionModel.PatchSize + kv["mistral3.vision.num_channels"] = p.VisionModel.NumChannels + // kv["mistral3.vision.attention.layer_norm_epsilon"] = 1e-05 // Default value + kv["mistral3.vision.rope.freq_base"] = p.VisionModel.RopeTheta + + // Multimodal configuration + kv["mistral3.image_token_index"] = p.ImageTokenIndex + kv["mistral3.spatial_merge_size"] = p.SpatialMergeSize + + kv["mistral3.mm.projector_bias"] = p.MultiModalProjectorBias + + if p.ProjectorHiddenAct != "" { + kv["mistral3.mm.projector_hidden_act"] = p.ProjectorHiddenAct + } + + return kv +} + +func (p *mistral3Model) Tensors(ts []Tensor) []*ggml.Tensor { + var out []*ggml.Tensor + + for _, t := range ts { + if !strings.HasPrefix(t.Name(), "v.") { + if strings.HasSuffix(t.Name(), ".attn_q.weight") || + strings.HasSuffix(t.Name(), ".attn_k.weight") { + t.SetRepacker(p.repack) + } + } + + out = append(out, &ggml.Tensor{ + Name: t.Name(), + Kind: t.Kind(), + Shape: t.Shape(), + WriterTo: t, + }) + } + + return out +} + +func (p *mistral3Model) Replacements() []string { + return []string{ + "language_model.model.norm", "output_norm", + "language_model.model.", "", + "language_model.", "", + "layers", "blk", + "transformer.layers", "blk", + "vision_tower", "v", + "ln_pre", "encoder_norm", + "input_layernorm", "attn_norm", + "post_attention_layernorm", "ffn_norm", + "embed_tokens", "token_embd", + "self_attn.q_proj", "attn_q", + "self_attn.k_proj", "attn_k", + "self_attn.v_proj", "attn_v", + "self_attn.o_proj", "attn_output", + "mlp.down_proj", "ffn_down", + "mlp.gate_proj", "ffn_gate", + "mlp.up_proj", "ffn_up", + "attention.q_proj", "attn_q", + "attention.k_proj", "attn_k", + "attention.v_proj", "attn_v", + "attention.o_proj", "attn_output", + "attention_norm", "attn_norm", + "feed_forward.gate_proj", "ffn_gate", + "feed_forward.down_proj", "ffn_down", + "feed_forward.up_proj", "ffn_up", + "multi_modal_projector", "mm", + "ffn_norm", "ffn_norm", + "lm_head", "output", + } +} + +func (p *mistral3Model) repack(name string, data []float32, shape []uint64) ([]float32, error) { + var dims []int + for _, dim := range shape { + dims = append(dims, int(dim)) + } + + var heads uint32 + if strings.HasSuffix(name, ".attn_q.weight") { + heads = p.TextModel.NumAttentionHeads + } else if strings.HasSuffix(name, ".attn_k.weight") { + heads = cmp.Or(p.TextModel.NumKeyValueHeads, p.TextModel.NumAttentionHeads) + } else { + return nil, fmt.Errorf("unknown tensor for repack: %s", name) + } + + n := tensor.New(tensor.WithShape(dims...), tensor.WithBacking(data)) + if err := n.Reshape(append([]int{int(heads), 2, dims[0] / int(heads) / 2}, dims[1:]...)...); err 
!= nil { + return nil, err + } + + if err := n.T(0, 2, 1, 3); err != nil { + return nil, err + } + + if err := n.Reshape(dims...); err != nil { + return nil, err + } + + if err := n.Transpose(); err != nil { + return nil, err + } + + ts, err := native.SelectF32(n, 1) + if err != nil { + return nil, err + } + + var f32s []float32 + for _, t := range ts { + f32s = append(f32s, t...) + } + + return f32s, nil +} diff --git a/convert/convert_mixtral.go b/convert/convert_mixtral.go new file mode 100644 index 0000000..17580ff --- /dev/null +++ b/convert/convert_mixtral.go @@ -0,0 +1,94 @@ +package convert + +import ( + "fmt" + "io" + "slices" + "strings" + + "github.com/ollama/ollama/fs/ggml" +) + +type mixtralModel struct { + llamaModel + NumLocalExperts uint32 `json:"num_local_experts"` + NumExpertsPerToken uint32 `json:"num_experts_per_tok"` +} + +func (p *mixtralModel) KV(t *Tokenizer) ggml.KV { + kv := p.llamaModel.KV(t) + + if p.NumLocalExperts > 0 { + kv["llama.expert_count"] = p.NumLocalExperts + } + + if p.NumExpertsPerToken > 0 { + kv["llama.expert_used_count"] = p.NumExpertsPerToken + } + + return kv +} + +func (p *mixtralModel) Tensors(ts []Tensor) []*ggml.Tensor { + oldnew := []string{ + "model.layers", "blk", + "w1", "ffn_gate_exps", + "w2", "ffn_down_exps", + "w3", "ffn_up_exps", + } + + for i := range p.NumLocalExperts { + oldnew = append(oldnew, fmt.Sprintf(".block_sparse_moe.experts.%d.", i), ".") + } + + // group experts of the same layer (model.layers.%d) and type (w[123]) into a single tensor + namer := strings.NewReplacer(oldnew...) + experts := make(map[string]experts) + + // merge experts into a single tensor while removing them from ts + ts = slices.DeleteFunc(ts, func(t Tensor) bool { + if !strings.Contains(t.Name(), ".block_sparse_moe.experts.") { + return false + } + + name := namer.Replace(t.Name()) + experts[name] = append(experts[name], t) + return true + }) + + var out []*ggml.Tensor + for n, e := range experts { + // TODO(mxyng): sanity check experts + out = append(out, &ggml.Tensor{ + Name: n, + Kind: e[0].Kind(), + Shape: append([]uint64{uint64(len(e))}, e[0].Shape()...), + WriterTo: e, + }) + } + + return append(out, p.llamaModel.Tensors(ts)...) +} + +func (p *mixtralModel) Replacements() []string { + return append( + p.llamaModel.Replacements(), + "block_sparse_moe.gate", "ffn_gate_inp", + ) +} + +type experts []Tensor + +func (e experts) WriteTo(w io.Writer) (int64, error) { + // TODO(mxyng): experts _should_ be numerically sorted by expert but this should check + for _, t := range e { + // the canonical merged experts tensor stacks all experts along a new, 0 axis, + // e.g. 
`tensor.Stack(0, e[0], e[1:]...)`, which requires allocating temporary buffers + // this accomplishes the same thing by writing each expert tensor in sequence + if _, err := t.WriteTo(w); err != nil { + return 0, err + } + } + + return 0, nil +} diff --git a/convert/convert_mllama.go b/convert/convert_mllama.go new file mode 100644 index 0000000..12478be --- /dev/null +++ b/convert/convert_mllama.go @@ -0,0 +1,160 @@ +package convert + +import ( + "strings" + + "github.com/ollama/ollama/fs/ggml" + "github.com/pdevine/tensor" + "github.com/pdevine/tensor/native" +) + +type mllamaModel struct { + ModelParameters + TextModel struct { + llamaModel + + CrossAttentionLayers []int32 `json:"cross_attention_layers"` + } `json:"text_config"` + VisionModel struct { + NumHiddenLayers uint32 `json:"num_hidden_layers"` + NumGlobalLayers uint32 `json:"num_global_layers"` + IntermediateLayersIndices []int32 `json:"intermediate_layers_indices"` + + HiddenSize uint32 `json:"hidden_size"` + IntermediateSize uint32 `json:"intermediate_size"` + + AttentionHeads uint32 `json:"attention_heads"` + + ImageSize uint32 `json:"image_size"` + PatchSize uint32 `json:"patch_size"` + NumChannels uint32 `json:"num_channels"` + MaxNumTiles uint32 `json:"max_num_tiles"` + NormEpsilon float32 `json:"norm_eps"` + RopeTheta float32 `json:"rope.freq_base"` + } `json:"vision_config"` +} + +func (m *mllamaModel) KV(t *Tokenizer) ggml.KV { + kv := m.ModelParameters.KV(t) + kv["general.architecture"] = "mllama" + + for k, v := range m.TextModel.KV(t) { + if strings.HasPrefix(k, "llama.") { + kv[strings.ReplaceAll(k, "llama.", "mllama.")] = v + } + } + + kv["mllama.attention.cross_attention_layers"] = m.TextModel.CrossAttentionLayers + + kv["mllama.vision.block_count"] = m.VisionModel.NumHiddenLayers + kv["mllama.vision.global.block_count"] = m.VisionModel.NumGlobalLayers + kv["mllama.vision.intermediate_layers_indices"] = m.VisionModel.IntermediateLayersIndices + + kv["mllama.vision.embedding_length"] = m.VisionModel.HiddenSize + kv["mllama.vision.feed_forward_length"] = m.VisionModel.IntermediateSize + + kv["mllama.vision.attention.head_count"] = m.VisionModel.AttentionHeads + kv["mllama.vision.attention.layer_norm_epsilon"] = m.VisionModel.NormEpsilon + + kv["mllama.vision.image_size"] = m.VisionModel.ImageSize + kv["mllama.vision.patch_size"] = m.VisionModel.PatchSize + kv["mllama.vision.max_num_tiles"] = m.VisionModel.MaxNumTiles + kv["mllama.vision.num_channels"] = m.VisionModel.NumChannels + + return kv +} + +func (m *mllamaModel) Replacements() []string { + return append( + m.TextModel.Replacements(), + "language_model.", "", + "gate_attn", "attn_gate", + "gate_ffn", "ffn_gate", + "cross_attn.", "cross_attn_", + "vision_model", "v", + "class_embedding", "class_embd", + "patch_embedding", "patch_embd", + "gated_positional_embedding.tile_embedding", "tile_position_embd", + "gated_positional_embedding.embedding", "position_embd.weight", + "gated_positional_embedding", "position_embd", + "embedding.weight", "weight", + "pre_tile_positional_embedding", "pre_tile_position_embd", + "post_tile_positional_embedding", "post_tile_position_embd", + "layernorm_pre", "pre_ln", + "layernorm_post", "post_ln", + "global_transformer.layers", "global.blk", + "transformer.layers", "blk", + "mlp.fc1", "ffn_up", + "mlp.fc2", "ffn_down", + "multi_modal_projector", "mm.0", + ) +} + +func (m *mllamaModel) Tensors(ts []Tensor) []*ggml.Tensor { + var out []*ggml.Tensor + var text []Tensor + for _, t := range ts { + if t.Name() == 
"v.position_embd.gate" { + for _, name := range []string{"v.position_embd.gate", "v.tile_position_embd.gate"} { + tt := t.Clone() + tt.SetRepacker(m.repack(name)) + out = append(out, &ggml.Tensor{ + Name: name, + Kind: t.Kind(), + Shape: t.Shape(), + WriterTo: tt, + }) + } + } else if t.Name() == "v.pre_tile_position_embd.gate" || t.Name() == "v.post_tile_position_embd.gate" { + t.SetRepacker(m.repack(t.Name())) + out = append(out, &ggml.Tensor{ + Name: t.Name(), + Kind: t.Kind(), + Shape: t.Shape(), + WriterTo: t, + }) + } else if strings.HasPrefix(t.Name(), "v.") || strings.HasPrefix(t.Name(), "mm.") { + out = append(out, &ggml.Tensor{ + Name: t.Name(), + Kind: t.Kind(), + Shape: t.Shape(), + WriterTo: t, + }) + } else { + text = append(text, t) + } + } + + return append(out, m.TextModel.Tensors(text)...) +} + +func (m *mllamaModel) repack(name string) Repacker { + return func(_ string, data []float32, shape []uint64) (_ []float32, err error) { + dims := make([]int, len(shape)) + for i, dim := range shape { + dims[i] = int(dim) + } + + var t tensor.Tensor = tensor.New(tensor.WithShape(dims...), tensor.WithBacking(data)) + + t, err = tensor.Tanh(t) + if err != nil { + return nil, err + } + + if name == "v.position_embd.gate" { + t, err = tensor.Sub(float32(1), t) + if err != nil { + return nil, err + } + } + + t = tensor.Materialize(t) + // flatten tensor so it can be return as a vector + if err := t.Reshape(t.Shape().TotalSize()); err != nil { + return nil, err + } + + return native.VectorF32(t.(*tensor.Dense)) + } +} diff --git a/convert/convert_phi3.go b/convert/convert_phi3.go new file mode 100644 index 0000000..5a67560 --- /dev/null +++ b/convert/convert_phi3.go @@ -0,0 +1,122 @@ +package convert + +import ( + "cmp" + "encoding/binary" + "io" + "math" + "strings" + "sync" + + "github.com/ollama/ollama/fs/ggml" +) + +type phi3Model struct { + ModelParameters + NumHiddenLayers uint32 `json:"num_hidden_layers"` + NLayers uint32 `json:"n_layers"` + HiddenSize uint32 `json:"hidden_size"` + NEmbd uint32 `json:"n_embd"` + IntermediateSize uint32 `json:"intermediate_size"` + NumAttentionHeads uint32 `json:"num_attention_heads"` + NHead uint32 `json:"n_head"` + NumKeyValueHeads uint32 `json:"num_key_value_heads"` + NHeadKV uint32 `json:"n_head_kv"` + RopeTheta float32 `json:"rope_theta"` + RopeScaling struct { + Type string `json:"type"` + LongFactor ropeFactor `json:"long_factor"` + ShortFactor ropeFactor `json:"short_factor"` + } `json:"rope_scaling"` + RMSNormEPS float32 `json:"rms_norm_eps"` + NPositions uint32 `json:"n_positions"` + MaxPositionEmbeddings uint32 `json:"max_position_embeddings"` + OriginalMaxPositionEmbeddings uint32 `json:"original_max_position_embeddings"` + SlidingWindow uint32 `json:"sliding_window"` +} + +var _ ModelConverter = (*phi3Model)(nil) + +func (p *phi3Model) KV(t *Tokenizer) ggml.KV { + kv := p.ModelParameters.KV(t) + kv["general.architecture"] = "phi3" + kv["phi3.context_length"] = p.MaxPositionEmbeddings + kv["phi3.embedding_length"] = cmp.Or(p.HiddenSize, p.NEmbd) + kv["phi3.feed_forward_length"] = p.IntermediateSize + kv["phi3.block_count"] = cmp.Or(p.NumHiddenLayers, p.NLayers) + kv["phi3.attention.head_count"] = cmp.Or(p.NumAttentionHeads, p.NHead) + kv["phi3.attention.head_count_kv"] = cmp.Or(p.NumKeyValueHeads, p.NHeadKV) + kv["phi3.attention.layer_norm_rms_epsilon"] = p.RMSNormEPS + kv["phi3.rope.dimension_count"] = p.HiddenSize / cmp.Or(p.NumAttentionHeads, p.NHead) + kv["phi3.rope.freq_base"] = p.RopeTheta + 
kv["phi3.rope.scaling.original_context_length"] = p.OriginalMaxPositionEmbeddings + kv["phi3.attention.sliding_window"] = p.SlidingWindow + + scale := float64(p.MaxPositionEmbeddings) / float64(p.OriginalMaxPositionEmbeddings) + + switch p.RopeScaling.Type { + case "": + // no scaling + case "su", "longrope": + kv["phi3.rope.scaling.attn_factor"] = float32(max(math.Sqrt(1+math.Log(scale)/math.Log(float64(p.OriginalMaxPositionEmbeddings))), 1.0)) + case "yarn": + kv["phi3.rope.scaling.attn_factor"] = float32(max(0.1*math.Log(scale)+1.0, 1.0)) + default: + panic("unknown rope scaling type") + } + + return kv +} + +func (p *phi3Model) Tensors(ts []Tensor) []*ggml.Tensor { + var addRopeFactors sync.Once + + out := make([]*ggml.Tensor, 0, len(ts)+2) + for _, t := range ts { + if strings.HasPrefix(t.Name(), "blk.0.") { + addRopeFactors.Do(func() { + out = append(out, &ggml.Tensor{ + Name: "rope_factors_long.weight", + Kind: 0, + Shape: []uint64{uint64(len(p.RopeScaling.LongFactor))}, + WriterTo: p.RopeScaling.LongFactor, + }, &ggml.Tensor{ + Name: "rope_factors_short.weight", + Kind: 0, + Shape: []uint64{uint64(len(p.RopeScaling.ShortFactor))}, + WriterTo: p.RopeScaling.ShortFactor, + }) + }) + } + + out = append(out, &ggml.Tensor{ + Name: t.Name(), + Kind: t.Kind(), + Shape: t.Shape(), + WriterTo: t, + }) + } + + return out +} + +func (p *phi3Model) Replacements() []string { + return []string{ + "lm_head", "output", + "model.embed_tokens", "token_embd", + "model.norm", "output_norm", + "model.layers", "blk", + "input_layernorm", "attn_norm", + "self_attn.qkv_proj", "attn_qkv", + "self_attn.o_proj", "attn_output", + "mlp.down_proj", "ffn_down", + "mlp.gate_up_proj", "ffn_up", + "post_attention_layernorm", "ffn_norm", + } +} + +type ropeFactor []float32 + +func (r ropeFactor) WriteTo(w io.Writer) (int64, error) { + return 0, binary.Write(w, binary.LittleEndian, r) +} diff --git a/convert/convert_qwen2.go b/convert/convert_qwen2.go new file mode 100644 index 0000000..3647c4e --- /dev/null +++ b/convert/convert_qwen2.go @@ -0,0 +1,81 @@ +package convert + +import "github.com/ollama/ollama/fs/ggml" + +type qwen2Model struct { + ModelParameters + MaxPositionEmbeddings uint32 `json:"max_position_embeddings"` + HiddenSize uint32 `json:"hidden_size"` + HiddenLayers uint32 `json:"num_hidden_layers"` + IntermediateSize uint32 `json:"intermediate_size"` + NumAttentionHeads uint32 `json:"num_attention_heads"` + NumKeyValueHeads uint32 `json:"num_key_value_heads"` + RopeTheta float32 `json:"rope_theta"` + RopeScaling struct { + Type string `json:"type"` + Factor ropeFactor `json:"factor"` + OriginalMaxPositionEmbeddings uint32 `json:"original_max_position_embeddings"` + MropeSection []int32 `json:"mrope_section"` + } `json:"rope_scaling"` + RMSNormEPS float32 `json:"rms_norm_eps"` +} + +var _ ModelConverter = (*qwen2Model)(nil) + +func (q *qwen2Model) KV(t *Tokenizer) ggml.KV { + kv := q.ModelParameters.KV(t) + kv["general.architecture"] = "qwen2" + kv["qwen2.block_count"] = q.HiddenLayers + kv["qwen2.context_length"] = q.MaxPositionEmbeddings + kv["qwen2.embedding_length"] = q.HiddenSize + kv["qwen2.feed_forward_length"] = q.IntermediateSize + kv["qwen2.attention.head_count"] = q.NumAttentionHeads + kv["qwen2.attention.head_count_kv"] = q.NumKeyValueHeads + kv["qwen2.rope.freq_base"] = q.RopeTheta + kv["qwen2.attention.layer_norm_rms_epsilon"] = q.RMSNormEPS + + switch q.RopeScaling.Type { + case "": + // no scaling + case "yarn": + kv["qwen2.rope.scaling.type"] = q.RopeScaling.Type + 
kv["qwen2.rope.scaling.factor"] = q.RopeScaling.Factor + case "mrope", "default": + kv["qwen2.rope.mrope_section"] = q.RopeScaling.MropeSection + default: + panic("unknown rope scaling type") + } + return kv +} + +func (q *qwen2Model) Tensors(ts []Tensor) []*ggml.Tensor { + var out []*ggml.Tensor + for _, t := range ts { + out = append(out, &ggml.Tensor{ + Name: t.Name(), + Kind: t.Kind(), + Shape: t.Shape(), + WriterTo: t, + }) + } + + return out +} + +func (p *qwen2Model) Replacements() []string { + return []string{ + "lm_head", "output", + "model.embed_tokens", "token_embd", + "model.layers", "blk", + "input_layernorm", "attn_norm", + "self_attn.k_proj", "attn_k", + "self_attn.v_proj", "attn_v", + "self_attn.q_proj", "attn_q", + "self_attn.o_proj", "attn_output", + "mlp.down_proj", "ffn_down", + "mlp.gate_proj", "ffn_gate", + "mlp.up_proj", "ffn_up", + "post_attention_layernorm", "ffn_norm", + "model.norm", "output_norm", + } +} diff --git a/convert/convert_qwen25vl.go b/convert/convert_qwen25vl.go new file mode 100644 index 0000000..c2d5a63 --- /dev/null +++ b/convert/convert_qwen25vl.go @@ -0,0 +1,102 @@ +package convert + +import ( + "cmp" + "slices" + "strings" + + "github.com/ollama/ollama/fs/ggml" +) + +type qwen25VLModel struct { + qwen2Model + + VisionModel struct { + Depth uint32 `json:"depth"` + HiddenSize uint32 `json:"hidden_size"` + NumHeads uint32 `json:"num_heads"` + InChannels uint32 `json:"in_chans"` + PatchSize uint32 `json:"patch_size"` + SpatialMergeSize uint32 `json:"spatial_merge_size"` + SpatialPatchSize uint32 `json:"spatial_patch_size"` + WindowSize uint32 `json:"window_size"` + RMSNormEps float32 `json:"layer_norm_epsilon"` + RopeTheta float32 `json:"rope_theta"` + FullAttentionBlocks []int32 `json:"fullatt_block_indexes"` + TemporalPatchSize uint32 `json:"temporal_patch_size"` + } `json:"vision_config"` +} + +var _ ModelConverter = (*qwen25VLModel)(nil) + +func (q *qwen25VLModel) KV(t *Tokenizer) ggml.KV { + kv := q.ModelParameters.KV(t) + kv["general.architecture"] = "qwen25vl" + + for k, v := range q.qwen2Model.KV(t) { + if strings.HasPrefix(k, "qwen2.") { + kv[strings.Replace(k, "qwen2.", "qwen25vl.", 1)] = v + } + } + + if q.VisionModel.FullAttentionBlocks == nil { + kv["qwen25vl.vision.fullatt_block_indexes"] = []int32{7, 15, 23, 31} + } + + kv["qwen25vl.vision.block_count"] = cmp.Or(q.VisionModel.Depth, 32) + kv["qwen25vl.vision.embedding_length"] = q.VisionModel.HiddenSize + kv["qwen25vl.vision.attention.head_count"] = cmp.Or(q.VisionModel.NumHeads, 16) + kv["qwen25vl.vision.num_channels"] = q.VisionModel.InChannels + kv["qwen25vl.vision.patch_size"] = cmp.Or(q.VisionModel.PatchSize, 14) + kv["qwen25vl.vision.spatial_merge_size"] = cmp.Or(q.VisionModel.SpatialMergeSize, 2) + kv["qwen25vl.vision.spatial_patch_size"] = q.VisionModel.SpatialPatchSize + kv["qwen25vl.vision.window_size"] = cmp.Or(q.VisionModel.WindowSize, 112) + kv["qwen25vl.vision.attention.layer_norm_epsilon"] = cmp.Or(q.VisionModel.RMSNormEps, 1e-6) + kv["qwen25vl.vision.rope.freq_base"] = cmp.Or(q.VisionModel.RopeTheta, 1e4) + kv["qwen25vl.vision.fullatt_block_indexes"] = q.VisionModel.FullAttentionBlocks + kv["qwen25vl.vision.temporal_patch_size"] = cmp.Or(q.VisionModel.TemporalPatchSize, 2) + + return kv +} + +func (q *qwen25VLModel) Tensors(ts []Tensor) []*ggml.Tensor { + var out []*ggml.Tensor + + for _, t := range ts { + if strings.Contains(t.Name(), "patch_embed.proj") { + for t := range splitDim(t, 2, + strings.NewReplacer("patch_embed.proj", "patch_embd_0"), + 
strings.NewReplacer("patch_embed.proj", "patch_embd_1"), + ) { + t.Shape = slices.DeleteFunc(t.Shape, func(i uint64) bool { return i == 1 }) + out = append(out, t) + } + } else if strings.Contains(t.Name(), "attn.qkv") { + out = append(out, slices.Collect(splitDim(t, 0, + strings.NewReplacer("attn.qkv", "attn_q"), + strings.NewReplacer("attn.qkv", "attn_k"), + strings.NewReplacer("attn.qkv", "attn_v"), + ))...) + } else { + out = append(out, &ggml.Tensor{ + Name: t.Name(), + Kind: t.Kind(), + Shape: t.Shape(), + WriterTo: t, + }) + } + } + + return out +} + +func (p *qwen25VLModel) Replacements() []string { + return append( + p.qwen2Model.Replacements(), + "visual", "v", + "blocks", "blk", + "attn.proj", "attn_out", + "norm1", "ln1", + "norm2", "ln2", + ) +} diff --git a/convert/convert_test.go b/convert/convert_test.go new file mode 100644 index 0000000..105fbb3 --- /dev/null +++ b/convert/convert_test.go @@ -0,0 +1,478 @@ +package convert + +import ( + "bytes" + "crypto/sha256" + "encoding/binary" + "encoding/hex" + "encoding/json" + "flag" + "fmt" + "io" + "io/fs" + "log/slog" + "os" + "path/filepath" + "slices" + "strings" + "testing" + + "golang.org/x/exp/maps" + + "github.com/ollama/ollama/fs/ggml" +) + +type tensorData struct { + Offsets []int `json:"data_offsets"` + Type string `json:"dtype"` + Shape []int `json:"shape"` +} + +func convertFull(t *testing.T, fsys fs.FS) (*os.File, ggml.KV, ggml.Tensors) { + t.Helper() + + f, err := os.CreateTemp(t.TempDir(), "f16") + if err != nil { + t.Fatal(err) + } + defer f.Close() + + if err := ConvertModel(fsys, f); err != nil { + t.Fatal(err) + } + + r, err := os.Open(f.Name()) + if err != nil { + t.Fatal(err) + } + t.Cleanup(func() { r.Close() }) + + m, err := ggml.Decode(r, -1) + if err != nil { + t.Fatal(err) + } + + if _, err := r.Seek(0, io.SeekStart); err != nil { + t.Fatal(err) + } + + return r, m.KV(), m.Tensors() +} + +func generateResultsJSON(t *testing.T, f *os.File, kv ggml.KV, tensors ggml.Tensors) map[string]string { + actual := make(map[string]string) + for k, v := range kv { + if s, ok := v.(json.Marshaler); !ok { + actual[k] = fmt.Sprintf("%v", v) + } else { + bts, err := json.Marshal(s) + if err != nil { + t.Fatal(err) + } + + actual[k] = fmt.Sprintf("%x", sha256.Sum256(bts)) + } + } + + for _, tensor := range tensors.Items() { + sha256sum := sha256.New() + sr := io.NewSectionReader(f, int64(tensors.Offset+tensor.Offset), int64(tensor.Size())) + if _, err := io.Copy(sha256sum, sr); err != nil { + t.Fatal(err) + } + + actual[tensor.Name] = hex.EncodeToString(sha256sum.Sum(nil)) + } + + return actual +} + +func TestMain(m *testing.M) { + var level slog.Level + flag.TextVar(&level, "level", slog.LevelInfo, "log level") + flag.Parse() + slog.SetLogLoggerLevel(level) + os.Exit(m.Run()) +} + +func TestConvertModel(t *testing.T) { + cases := []string{ + "Meta-Llama-3-8B-Instruct", + "Meta-Llama-3.1-8B-Instruct", + "Mistral-7B-Instruct-v0.2", + "Mixtral-8x7B-Instruct-v0.1", + "gemma-2b-it", + "gemma-2-2b-it", + // microsoft/Phi-3-mini-128-instruct@d548c233192db00165d842bf8edff054bb3212f8 + "Phi-3-mini-128k-instruct", + "all-MiniLM-L6-v2", + "gemma-2-9b-it", + "Qwen2.5-0.5B-Instruct", + "c4ai-command-r-v01", + } + + for i := range cases { + tt := cases[i] + t.Run(tt, func(t *testing.T) { + t.Parallel() + + p := filepath.Join("testdata", tt) + if testing.Short() { + t.Skip("skipping in short mode") + } else if _, err := os.Stat(p); err != nil { + t.Skipf("%s not found", p) + } + + f, kv, tensors := convertFull(t, os.DirFS(p)) + actual 
:= generateResultsJSON(t, f, kv, tensors) + + expectFile, err := os.Open(filepath.Join("testdata", fmt.Sprintf("%s.json", tt))) + if err != nil { + t.Fatal(err) + } + defer expectFile.Close() + + var expect map[string]string + if err := json.NewDecoder(expectFile).Decode(&expect); err != nil { + t.Fatal(err) + } + + keys := maps.Keys(expect) + slices.Sort(keys) + for _, k := range keys { + if v, ok := actual[k]; !ok { + t.Errorf("missing %s", k) + } else if v != expect[k] { + t.Errorf("unexpected %s: want %s, got %s", k, expect[k], v) + } + } + }) + } +} + +func TestConvertInvalidTensorNames(t *testing.T) { + f, err := os.CreateTemp(t.TempDir(), "testmodel") + if err != nil { + t.Fatal(err) + } + defer f.Close() + + tempDir := t.TempDir() + + td := map[string]*tensorData{} + offset := 4096 + + td["model.layers.0.self_attn.q_proj.weight"] = &tensorData{ + Offsets: []int{0, offset}, + Type: "F32", + Shape: []int{4096, 4096}, + } + td["blk.0.attn_q.weight"] = &tensorData{ + Offsets: []int{offset, offset * 2}, + Type: "F32", + Shape: []int{4096, 4096}, + } + generateSafetensorTestData(t, tempDir, td) + + err = ConvertModel(os.DirFS(tempDir), f) + if err == nil || !strings.HasPrefix(err.Error(), "duplicate tensor name") { + t.Errorf("expected error but didn't get one") + } +} + +func TestConvertInvalidDatatype(t *testing.T) { + f, err := os.CreateTemp(t.TempDir(), "testmodel") + if err != nil { + t.Fatal(err) + } + defer f.Close() + + tempDir := t.TempDir() + + td := map[string]*tensorData{} + offset := 4096 * 14336 + + td["model.layers.0.mlp.down_proj.weight"] = &tensorData{ + Offsets: []int{0, offset}, + Type: "I8", + Shape: []int{4096, 14336}, + } + td["model.layers.0.mlp.down_proj.weight_format"] = &tensorData{ + Offsets: []int{offset, offset}, + Type: "U8", + Shape: []int{}, + } + generateSafetensorTestData(t, tempDir, td) + + err = ConvertModel(os.DirFS(tempDir), f) + if err == nil || err.Error() != "unsupported safetensors model" { + t.Errorf("expected error but didn't get one") + } +} + +func generateSafetensorTestData(t *testing.T, tempDir string, tensorData map[string]*tensorData) { + data, err := json.Marshal(tensorData) + if err != nil { + t.Fatal(err) + } + + var buf bytes.Buffer + + l := int64(len(data)) + err = binary.Write(&buf, binary.LittleEndian, l) + if err != nil { + t.Fatal(err) + } + + _, err = buf.Write(data) + if err != nil { + t.Fatal(err) + } + + fdata, err := os.Create(filepath.Join(tempDir, "model-00001-of-00001.safetensors")) + if err != nil { + t.Fatal(err) + } + defer fdata.Close() + + _, err = fdata.Write(buf.Bytes()) + if err != nil { + t.Fatal(err) + } + + configData := ` +{ + "architectures": [ + "LlamaForCausalLM" + ] +} +` + + f, err := os.Create(filepath.Join(tempDir, "config.json")) + if err != nil { + t.Fatal(err) + } + defer f.Close() + + _, err = f.WriteString(configData) + if err != nil { + t.Fatal(err) + } + + tokenizerData := ` +{ +} +` + + f, err = os.Create(filepath.Join(tempDir, "tokenizer.json")) + if err != nil { + t.Fatal(err) + } + defer f.Close() + + _, err = f.WriteString(tokenizerData) + if err != nil { + t.Fatal(err) + } +} + +func TestConvertAdapter(t *testing.T) { + type AdapterCase struct { + Name string + BaseKV map[string]any + Expected map[string]string + } + + cases := []AdapterCase{ + { + Name: "discollama", + BaseKV: map[string]any{ + "general.architecture": "llama", + "llama.attention.head_count": uint32(32), + "llama.attention.head_count_kv": uint32(8), + }, + Expected: map[string]string{ + "general.architecture": "llama", + 
"general.file_type": "1", + "general.parameter_count": "106496", + "general.type": "adapter", + "general.version": "v0.2", + "adapter.lora.alpha": "16", + "adapter.type": "lora", + "llama.attention.head_count": "32", + "llama.attention.head_count_kv": "8", + "blk.31.attn_q.weight.lora_a": "0eb3318b02cd313429bcc7621b539fdbb10240fea190c56c9e5f93fcd37a4e50", + "blk.31.attn_q.weight.lora_b": "0eb3318b02cd313429bcc7621b539fdbb10240fea190c56c9e5f93fcd37a4e50", + "blk.31.attn_v.weight.lora_a": "0eb3318b02cd313429bcc7621b539fdbb10240fea190c56c9e5f93fcd37a4e50", + "blk.31.attn_v.weight.lora_b": "071dcafe89df065d6e1c935ecb8fdf6479b3c202eb912e7da938597673ff5857", + }, + }, + } + + for _, c := range cases { + t.Run(c.Name, func(t *testing.T) { + t.Parallel() + + f, err := os.CreateTemp(t.TempDir(), "f16") + if err != nil { + t.Fatal(err) + } + defer f.Close() + + tempDir := t.TempDir() + generateLoraTestData(t, tempDir) + + if err = ConvertAdapter(os.DirFS(tempDir), f, c.BaseKV); err != nil { + t.Fatal(err) + } + + r, err := os.Open(f.Name()) + if err != nil { + t.Fatal(err) + } + defer r.Close() + + m, err := ggml.Decode(r, -1) + if err != nil { + t.Fatal(err) + } + + if _, err := r.Seek(0, io.SeekStart); err != nil { + t.Fatal(err) + } + + actual := generateResultsJSON(t, r, m.KV(), m.Tensors()) + + keys := maps.Keys(c.Expected) + slices.Sort(keys) + for _, k := range keys { + if v, ok := actual[k]; !ok { + t.Errorf("missing %s", k) + } else if v != c.Expected[k] { + t.Errorf("unexpected %s: want %s, got %s", k, c.Expected[k], v) + } + } + }) + } +} + +func generateLoraTestData(t *testing.T, tempDir string) { + offset := 4096 * 8 * 4 + + td := map[string]*tensorData{"__metadata__": nil} + td["model.layers.31.self_attn.q_proj.lora_a"] = &tensorData{ + Offsets: []int{0, offset}, + Type: "F32", + Shape: []int{4096, 8}, + } + td["model.layers.31.self_attn.q_proj.lora_b"] = &tensorData{ + Offsets: []int{offset, offset * 2}, + Type: "F32", + Shape: []int{8, 4096}, + } + td["model.layers.31.self_attn.v_proj.lora_a"] = &tensorData{ + Offsets: []int{offset * 2, offset * 3}, + Type: "F32", + Shape: []int{4096, 8}, + } + td["model.layers.31.self_attn.v_proj.lora_b"] = &tensorData{ + Offsets: []int{offset * 3, offset*3 + 8*1024*4}, + Type: "F32", + Shape: []int{8, 1024}, + } + + data, err := json.Marshal(td) + if err != nil { + t.Fatal(err) + } + + var buf bytes.Buffer + + l := int64(len(data)) + err = binary.Write(&buf, binary.LittleEndian, l) + if err != nil { + t.Fatal(err) + } + + _, err = buf.Write(data) + if err != nil { + t.Fatal(err) + } + + // write some data for the tensors + + ones := make([]float32, 4096*8) + for i := range ones { + ones[i] = float32(1) + } + + for range 3 { + err = binary.Write(&buf, binary.LittleEndian, ones) + if err != nil { + t.Fatal(err) + } + } + + ones = make([]float32, 1024*8) + for i := range ones { + ones[i] = float32(1) + } + + err = binary.Write(&buf, binary.LittleEndian, ones) + if err != nil { + t.Fatal(err) + } + + fdata, err := os.Create(filepath.Join(tempDir, "adapters.safetensors")) + if err != nil { + t.Fatal(err) + } + defer fdata.Close() + + _, err = fdata.Write(buf.Bytes()) + if err != nil { + t.Fatal(err) + } + + configData := ` +{ + "adapter_path": "adapters-test", + "batch_size": 8, + "config": "config-tiny.json", + "data": "../discollama-completion", + "grad_checkpoint": null, + "iters": 1000, + "learning_rate": 1e-05, + "lora_layers": 1, + "lora_parameters": { + "rank": 8, + "alpha": 16, + "dropout": 0.0, + "scale": 2.0 + }, + "lr_schedule": null, + 
"max_seq_length": 2048, + "model": "/Users/pdevine/git/Meta-Llama-3-8B-Instruct", + "resume_adapter_file": null, + "save_every": 100, + "seed": 0, + "steps_per_eval": 200, + "steps_per_report": 10, + "test": false, + "test_batches": 500, + "train": true, + "use_dora": false, + "val_batches": 25 +} +` + f, err := os.Create(filepath.Join(tempDir, "adapter_config.json")) + if err != nil { + t.Fatal(err) + } + defer f.Close() + + _, err = f.WriteString(configData) + if err != nil { + t.Fatal(err) + } +} diff --git a/convert/reader.go b/convert/reader.go new file mode 100644 index 0000000..07d12f0 --- /dev/null +++ b/convert/reader.go @@ -0,0 +1,88 @@ +package convert + +import ( + "errors" + "io" + "io/fs" + "strings" +) + +type Tensor interface { + Name() string + Shape() []uint64 + Kind() uint32 + SetRepacker(Repacker) + WriteTo(io.Writer) (int64, error) + Clone() Tensor +} + +type tensorBase struct { + name string + shape []uint64 + repacker Repacker +} + +func (t tensorBase) Name() string { + return t.name +} + +func (t tensorBase) Shape() []uint64 { + return t.shape +} + +const ( + tensorKindF32 uint32 = iota + tensorKindF16 +) + +func (t tensorBase) Kind() uint32 { + if strings.HasSuffix(t.name, ".ffn_gate_inp.weight") || + t.name == "token_types.weight" || + t.name == "v.positional_embedding_vlm" || + t.name == "v.tile_position_embd.weight" || + t.name == "v.pre_tile_position_embd.weight" || + t.name == "v.post_tile_position_embd.weight" { + // these tensors are always F32 + return 0 + } + + switch len(t.shape) { + case 0: + panic("invalid tensor shape") + case 1: + return tensorKindF32 + default: + return tensorKindF16 + } +} + +func (t *tensorBase) SetRepacker(fn Repacker) { + t.repacker = fn +} + +type Repacker func(string, []float32, []uint64) ([]float32, error) + +func parseTensors(fsys fs.FS, replacer *strings.Replacer) ([]Tensor, error) { + patterns := []struct { + Pattern string + Func func(fs.FS, *strings.Replacer, ...string) ([]Tensor, error) + }{ + {"*.safetensors", parseSafetensors}, + {"pytorch_model-*-of-*.bin", parseTorch}, + {"pytorch_model.bin", parseTorch}, + {"consolidated.*.pth", parseTorch}, + } + + for _, pattern := range patterns { + matches, err := fs.Glob(fsys, pattern.Pattern) + if err != nil { + return nil, err + } + + if len(matches) > 0 { + return pattern.Func(fsys, replacer, matches...) 
+ } + } + + return nil, errors.New("unknown tensor format") +} diff --git a/convert/reader_safetensors.go b/convert/reader_safetensors.go new file mode 100644 index 0000000..f585853 --- /dev/null +++ b/convert/reader_safetensors.go @@ -0,0 +1,178 @@ +package convert + +import ( + "bytes" + "encoding/binary" + "encoding/json" + "errors" + "fmt" + "io" + "io/fs" + "slices" + "strings" + + "github.com/d4l3k/go-bfloat16" + "github.com/x448/float16" + "golang.org/x/exp/maps" +) + +type safetensorMetadata struct { + Type string `json:"dtype"` + Shape []uint64 `json:"shape"` + Offsets []int64 `json:"data_offsets"` +} + +func parseSafetensors(fsys fs.FS, replacer *strings.Replacer, ps ...string) ([]Tensor, error) { + var ts []Tensor + for _, p := range ps { + f, err := fsys.Open(p) + if err != nil { + return nil, err + } + defer f.Close() + + var n int64 + if err := binary.Read(f, binary.LittleEndian, &n); err != nil { + return nil, err + } + + b := bytes.NewBuffer(make([]byte, 0, n)) + if _, err = io.CopyN(b, f, n); err != nil { + return nil, err + } + + var headers map[string]safetensorMetadata + if err := json.NewDecoder(b).Decode(&headers); err != nil { + return nil, err + } + + keys := maps.Keys(headers) + slices.Sort(keys) + + names := make(map[string]struct{}, len(keys)) + + for _, key := range keys { + if value := headers[key]; value.Type != "" { + // bitsandbytes quantized models are unsupported + if len(value.Shape) == 0 { + return nil, errors.New("unsupported safetensors model") + } + ggufName := replacer.Replace(key) + if _, ok := names[ggufName]; ok { + return nil, fmt.Errorf("duplicate tensor name '%s' was found for this model", ggufName) + } + names[ggufName] = struct{}{} + ts = append(ts, safetensor{ + fs: fsys, + path: p, + dtype: value.Type, + offset: safetensorsPad(n, value.Offsets[0]), + size: safetensorsPad(n, value.Offsets[1]) - safetensorsPad(n, value.Offsets[0]), + tensorBase: &tensorBase{ + name: ggufName, + shape: value.Shape, + }, + }) + } + } + } + + return ts, nil +} + +// safetensorsPad returns the padded size of the safetensors file given a length n and offset s +func safetensorsPad(n, offset int64) int64 { + return 8 + n + offset +} + +type safetensor struct { + fs fs.FS + path string + dtype string + offset int64 + size int64 + *tensorBase +} + +func (st safetensor) Clone() Tensor { + return &safetensor{ + fs: st.fs, + path: st.path, + dtype: st.dtype, + offset: st.offset, + size: st.size, + tensorBase: &tensorBase{ + name: st.name, + repacker: st.repacker, + shape: slices.Clone(st.shape), + }, + } +} + +func (st safetensor) WriteTo(w io.Writer) (int64, error) { + f, err := st.fs.Open(st.path) + if err != nil { + return 0, err + } + defer f.Close() + + if seeker, ok := f.(io.Seeker); ok { + if _, err := seeker.Seek(st.offset, io.SeekStart); err != nil { + return 0, err + } + } else { + if _, err := io.CopyN(io.Discard, f, st.offset); err != nil { + return 0, err + } + } + + var f32s []float32 + switch st.dtype { + case "F32": + f32s = make([]float32, st.size/4) + if err = binary.Read(f, binary.LittleEndian, f32s); err != nil { + return 0, err + } + case "F16": + u16s := make([]uint16, st.size/2) + if err = binary.Read(f, binary.LittleEndian, u16s); err != nil { + return 0, err + } + + f32s = make([]float32, len(u16s)) + for i := range u16s { + f32s[i] = float16.Frombits(u16s[i]).Float32() + } + + case "BF16": + u8s := make([]uint8, st.size) + if err = binary.Read(f, binary.LittleEndian, u8s); err != nil { + return 0, err + } + + f32s = bfloat16.DecodeFloat32(u8s) + 
default: + return 0, fmt.Errorf("unknown data type: %s", st.dtype) + } + + if st.repacker != nil { + f32s, err = st.repacker(st.Name(), f32s, st.Shape()) + if err != nil { + return 0, err + } + } + + switch st.Kind() { + case tensorKindF32: + return 0, binary.Write(w, binary.LittleEndian, f32s) + case tensorKindF16: + f16s := make([]uint16, len(f32s)) + for i := range f32s { + f16s[i] = float16.Fromfloat32(f32s[i]).Bits() + } + + return 0, binary.Write(w, binary.LittleEndian, f16s) + default: + return 0, fmt.Errorf("unknown storage type: %d", st.Kind()) + } +} diff --git a/convert/reader_torch.go b/convert/reader_torch.go new file mode 100644 index 0000000..7f6d6c8 --- /dev/null +++ b/convert/reader_torch.go @@ -0,0 +1,59 @@ +package convert + +import ( + "io" + "io/fs" + "strings" + + "github.com/nlpodyssey/gopickle/pytorch" + "github.com/nlpodyssey/gopickle/types" +) + +func parseTorch(fsys fs.FS, replacer *strings.Replacer, ps ...string) ([]Tensor, error) { + var ts []Tensor + for _, p := range ps { + pt, err := pytorch.Load(p) + if err != nil { + return nil, err + } + + for _, k := range pt.(*types.Dict).Keys() { + t := pt.(*types.Dict).MustGet(k) + + var shape []uint64 + for dim := range t.(*pytorch.Tensor).Size { + shape = append(shape, uint64(dim)) + } + + ts = append(ts, torch{ + storage: t.(*pytorch.Tensor).Source, + tensorBase: &tensorBase{ + name: replacer.Replace(k.(string)), + shape: shape, + }, + }) + } + } + + return ts, nil +} + +type torch struct { + storage pytorch.StorageInterface + *tensorBase +} + +func (t torch) Clone() Tensor { + return torch{ + storage: t.storage, + tensorBase: &tensorBase{ + name: t.name, + shape: t.shape, + repacker: t.repacker, + }, + } +} + +func (pt torch) WriteTo(w io.Writer) (int64, error) { + return 0, nil +} diff --git a/convert/sentencepiece/sentencepiece_model.pb.go b/convert/sentencepiece/sentencepiece_model.pb.go new file mode 100644 index 0000000..76d136e --- /dev/null +++ b/convert/sentencepiece/sentencepiece_model.pb.go @@ -0,0 +1,1497 @@ +// Copyright 2016 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.! + +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.32.0 +// protoc v4.25.2 +// source: sentencepiece_model.proto + +package sentencepiece + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +// Model type. only have UNIGRAM now. 
+type TrainerSpec_ModelType int32 + +const ( + TrainerSpec_UNIGRAM TrainerSpec_ModelType = 1 // Unigram language model with dynamic algorithm + TrainerSpec_BPE TrainerSpec_ModelType = 2 // Byte Pair Encoding + TrainerSpec_WORD TrainerSpec_ModelType = 3 // Delimitered by whitespace. + TrainerSpec_CHAR TrainerSpec_ModelType = 4 // tokenizes into character sequence +) + +// Enum value maps for TrainerSpec_ModelType. +var ( + TrainerSpec_ModelType_name = map[int32]string{ + 1: "UNIGRAM", + 2: "BPE", + 3: "WORD", + 4: "CHAR", + } + TrainerSpec_ModelType_value = map[string]int32{ + "UNIGRAM": 1, + "BPE": 2, + "WORD": 3, + "CHAR": 4, + } +) + +func (x TrainerSpec_ModelType) Enum() *TrainerSpec_ModelType { + p := new(TrainerSpec_ModelType) + *p = x + return p +} + +func (x TrainerSpec_ModelType) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (TrainerSpec_ModelType) Descriptor() protoreflect.EnumDescriptor { + return file_sentencepiece_model_proto_enumTypes[0].Descriptor() +} + +func (TrainerSpec_ModelType) Type() protoreflect.EnumType { + return &file_sentencepiece_model_proto_enumTypes[0] +} + +func (x TrainerSpec_ModelType) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Do not use. +func (x *TrainerSpec_ModelType) UnmarshalJSON(b []byte) error { + num, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b) + if err != nil { + return err + } + *x = TrainerSpec_ModelType(num) + return nil +} + +// Deprecated: Use TrainerSpec_ModelType.Descriptor instead. +func (TrainerSpec_ModelType) EnumDescriptor() ([]byte, []int) { + return file_sentencepiece_model_proto_rawDescGZIP(), []int{0, 0} +} + +type ModelProto_SentencePiece_Type int32 + +const ( + ModelProto_SentencePiece_NORMAL ModelProto_SentencePiece_Type = 1 // normal symbol + ModelProto_SentencePiece_UNKNOWN ModelProto_SentencePiece_Type = 2 // unknown symbol. only for now. + ModelProto_SentencePiece_CONTROL ModelProto_SentencePiece_Type = 3 // control symbols. , , <2ja> etc. + ModelProto_SentencePiece_USER_DEFINED ModelProto_SentencePiece_Type = 4 // user defined symbols. + // Typical usage of USER_DEFINED symbol + // is placeholder. + ModelProto_SentencePiece_BYTE ModelProto_SentencePiece_Type = 6 // byte symbols. Used when `byte_fallback` is true. + ModelProto_SentencePiece_UNUSED ModelProto_SentencePiece_Type = 5 // this piece is not used. +) + +// Enum value maps for ModelProto_SentencePiece_Type. +var ( + ModelProto_SentencePiece_Type_name = map[int32]string{ + 1: "NORMAL", + 2: "UNKNOWN", + 3: "CONTROL", + 4: "USER_DEFINED", + 6: "BYTE", + 5: "UNUSED", + } + ModelProto_SentencePiece_Type_value = map[string]int32{ + "NORMAL": 1, + "UNKNOWN": 2, + "CONTROL": 3, + "USER_DEFINED": 4, + "BYTE": 6, + "UNUSED": 5, + } +) + +func (x ModelProto_SentencePiece_Type) Enum() *ModelProto_SentencePiece_Type { + p := new(ModelProto_SentencePiece_Type) + *p = x + return p +} + +func (x ModelProto_SentencePiece_Type) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (ModelProto_SentencePiece_Type) Descriptor() protoreflect.EnumDescriptor { + return file_sentencepiece_model_proto_enumTypes[1].Descriptor() +} + +func (ModelProto_SentencePiece_Type) Type() protoreflect.EnumType { + return &file_sentencepiece_model_proto_enumTypes[1] +} + +func (x ModelProto_SentencePiece_Type) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Do not use. 
+func (x *ModelProto_SentencePiece_Type) UnmarshalJSON(b []byte) error { + num, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b) + if err != nil { + return err + } + *x = ModelProto_SentencePiece_Type(num) + return nil +} + +// Deprecated: Use ModelProto_SentencePiece_Type.Descriptor instead. +func (ModelProto_SentencePiece_Type) EnumDescriptor() ([]byte, []int) { + return file_sentencepiece_model_proto_rawDescGZIP(), []int{3, 0, 0} +} + +// TrainerSpec encodes a various parameters for SentencePiece training. +// Next id: 55 +type TrainerSpec struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + extensionFields protoimpl.ExtensionFields + + // ///////////////////////////////////////////////////////////////// + // General parameters + // + // Input corpus files. + // + // Trainer accepts the following two formats: + // A) Monolingual: plain text, one sentence per line. + // B) Bilingual: TSV, source sentence target sentence + // When bilingual data is passed, shared vocabulary model is built. + // Note that the input file must be raw corpus, not a preprocessed corpus. + // Trainer only loads the first `input_sentence_size` sentences specified + // with this parameter. + Input []string `protobuf:"bytes,1,rep,name=input" json:"input,omitempty"` + // Input corpus format: + // "text": one-sentence-per-line text format (default) + // "tsv": sentence freq + InputFormat *string `protobuf:"bytes,7,opt,name=input_format,json=inputFormat" json:"input_format,omitempty"` + // Output model file prefix. + // .model and .vocab are generated. + ModelPrefix *string `protobuf:"bytes,2,opt,name=model_prefix,json=modelPrefix" json:"model_prefix,omitempty"` + ModelType *TrainerSpec_ModelType `protobuf:"varint,3,opt,name=model_type,json=modelType,enum=sentencepiece.TrainerSpec_ModelType,def=1" json:"model_type,omitempty"` + // Vocabulary size. 8k is the default size. + VocabSize *int32 `protobuf:"varint,4,opt,name=vocab_size,json=vocabSize,def=8000" json:"vocab_size,omitempty"` + // List of the languages this model can accept. + // Since the model is language-agnostic, this field is used as a reference. + AcceptLanguage []string `protobuf:"bytes,5,rep,name=accept_language,json=acceptLanguage" json:"accept_language,omitempty"` + // Size of self-test samples, which are encoded in the model file. + SelfTestSampleSize *int32 `protobuf:"varint,6,opt,name=self_test_sample_size,json=selfTestSampleSize,def=0" json:"self_test_sample_size,omitempty"` + // Whether to use DP version of sentencepiece. Use it with TSV input format + // (requires precomputed word tab counts to work). + EnableDifferentialPrivacy *bool `protobuf:"varint,50,opt,name=enable_differential_privacy,json=enableDifferentialPrivacy,def=0" json:"enable_differential_privacy,omitempty"` + // Set these parameters if you need DP version of sentencepiece. + // std of noise to add. + DifferentialPrivacyNoiseLevel *float32 `protobuf:"fixed32,51,opt,name=differential_privacy_noise_level,json=differentialPrivacyNoiseLevel,def=0" json:"differential_privacy_noise_level,omitempty"` + // Clipping threshold to apply after adding noise. All the words with + // frequency less than this value are dropped. 
+ DifferentialPrivacyClippingThreshold *uint64 `protobuf:"varint,52,opt,name=differential_privacy_clipping_threshold,json=differentialPrivacyClippingThreshold,def=0" json:"differential_privacy_clipping_threshold,omitempty"` + // ///////////////////////////////////////////////////////////////// + // Training parameters. + // + // Uses characters which cover the corpus with the ratio of `chars_coverage`. + // This parameter determines the set of basic Alphabet of sentence piece. + // 1.0 - `chars_coverage` characters are treated as UNK. + // See also required_chars field. + CharacterCoverage *float32 `protobuf:"fixed32,10,opt,name=character_coverage,json=characterCoverage,def=0.9995" json:"character_coverage,omitempty"` + // Maximum size of sentences the trainer loads from `input` parameter. + // Trainer simply loads the `input` files in sequence. + // It is better to shuffle the input corpus randomly. + InputSentenceSize *uint64 `protobuf:"varint,11,opt,name=input_sentence_size,json=inputSentenceSize,def=0" json:"input_sentence_size,omitempty"` + ShuffleInputSentence *bool `protobuf:"varint,19,opt,name=shuffle_input_sentence,json=shuffleInputSentence,def=1" json:"shuffle_input_sentence,omitempty"` + // Maximum size of sentences to make seed sentence pieces. + // Extended suffix array is constructed to extract frequent + // sub-strings from the corpus. This uses 20N working space, + // where N is the size of corpus. + // + // Deprecated: Marked as deprecated in sentencepiece_model.proto. + MiningSentenceSize *int32 `protobuf:"varint,12,opt,name=mining_sentence_size,json=miningSentenceSize" json:"mining_sentence_size,omitempty"` + // Maximum size of sentences to train sentence pieces. + // + // Deprecated: Marked as deprecated in sentencepiece_model.proto. + TrainingSentenceSize *int32 `protobuf:"varint,13,opt,name=training_sentence_size,json=trainingSentenceSize" json:"training_sentence_size,omitempty"` + // The size of seed sentencepieces. + // `seed_sentencepiece_size` must be larger than `vocab_size`. + SeedSentencepieceSize *int32 `protobuf:"varint,14,opt,name=seed_sentencepiece_size,json=seedSentencepieceSize,def=1000000" json:"seed_sentencepiece_size,omitempty"` + // In every EM sub-iterations, keeps top + // `shrinking_factor` * `current sentencepieces size` with respect to + // the loss of the sentence piece. This value should be smaller than 1.0. + ShrinkingFactor *float32 `protobuf:"fixed32,15,opt,name=shrinking_factor,json=shrinkingFactor,def=0.75" json:"shrinking_factor,omitempty"` + // The maximum sentence length in byte. The sentences with the length + // larger than `max_sentence_length` is simply ignored. + // Longer input tends to bring the following risks: + // - Overflow during EM training (unigram language model only) + // - Performance drop because of O(n log n) cost in BPE. + MaxSentenceLength *int32 `protobuf:"varint,18,opt,name=max_sentence_length,json=maxSentenceLength,def=4192" json:"max_sentence_length,omitempty"` + // Number of threads in the training. + NumThreads *int32 `protobuf:"varint,16,opt,name=num_threads,json=numThreads,def=16" json:"num_threads,omitempty"` + // Number of EM sub iterations. + NumSubIterations *int32 `protobuf:"varint,17,opt,name=num_sub_iterations,json=numSubIterations,def=2" json:"num_sub_iterations,omitempty"` + // ///////////////////////////////////////////////////////////////// + // SentencePiece parameters which control the shapes of sentence piece. + // + // Maximum length of sentencepiece. 
+ MaxSentencepieceLength *int32 `protobuf:"varint,20,opt,name=max_sentencepiece_length,json=maxSentencepieceLength,def=16" json:"max_sentencepiece_length,omitempty"` + // Uses Unicode script to split sentence pieces. + // When `split_by_unicode_script` is true, we do not allow sentence piece to + // include multiple Unicode scripts, e.g. "F1" is not a valid piece. + // Exception: CJ characters (Hiragana/Katakana/Han) are all handled + // as one script type, since Japanese word can consist of multiple scripts. + // This exception is always applied regardless of the accept-language + // parameter. + SplitByUnicodeScript *bool `protobuf:"varint,21,opt,name=split_by_unicode_script,json=splitByUnicodeScript,def=1" json:"split_by_unicode_script,omitempty"` + // When `split_by_number` is true, put a boundary between number and + // non-number transition. If we want to treat "F1" is one token, set this flag + // to be false. + SplitByNumber *bool `protobuf:"varint,23,opt,name=split_by_number,json=splitByNumber,def=1" json:"split_by_number,omitempty"` + // Use a white space to split sentence pieces. + // When `split_by_whitespace` is false, we may have the piece containing + // a white space in the middle. e.g., "in_the". + SplitByWhitespace *bool `protobuf:"varint,22,opt,name=split_by_whitespace,json=splitByWhitespace,def=1" json:"split_by_whitespace,omitempty"` + // Adds whitespace symbol (_) as a suffix instead of prefix. e.g., _hello => + // hello_. When `treat_whitespace_as_suffix` is true, + // NormalizerSpec::add_dummy_prefix will add the dummy whitespace to the end + // of sentence. + TreatWhitespaceAsSuffix *bool `protobuf:"varint,24,opt,name=treat_whitespace_as_suffix,json=treatWhitespaceAsSuffix,def=0" json:"treat_whitespace_as_suffix,omitempty"` + // Allows pieces that only contain whitespaces instead of appearing only as + // prefix or suffix of other pieces. + AllowWhitespaceOnlyPieces *bool `protobuf:"varint,26,opt,name=allow_whitespace_only_pieces,json=allowWhitespaceOnlyPieces,def=0" json:"allow_whitespace_only_pieces,omitempty"` + // Split all digits (0-9) into separate pieces. + SplitDigits *bool `protobuf:"varint,25,opt,name=split_digits,json=splitDigits,def=0" json:"split_digits,omitempty"` + // Defines the pre-tokenization delimiter. + // When specified, no pieces crossing this delimiter is not included + // in the vocab. Then the delimiter string is virtually ignored + // during the training. This field can allows constraints on the vocabulary + // selection. Note that this field is available on unigram mode. + PretokenizationDelimiter *string `protobuf:"bytes,53,opt,name=pretokenization_delimiter,json=pretokenizationDelimiter,def=" json:"pretokenization_delimiter,omitempty"` + // ///////////////////////////////////////////////////////////////// + // Vocabulary management + // + // Defines control symbols used as an indicator to + // change the behavior of the decoder. and are pre-defined. + // We can use this field to encode various meta information, + // including language indicator in multilingual model. + // These symbols are not visible to users, but visible to + // the decoder. Note that when the input sentence contains control symbols, + // they are not treated as one token, but segmented into normal pieces. + // Control symbols must be inserted independently from the segmentation. + ControlSymbols []string `protobuf:"bytes,30,rep,name=control_symbols,json=controlSymbols" json:"control_symbols,omitempty"` + // Defines user defined symbols. 
+ // These symbols are added with extremely high score + // so they are always treated as one unique symbol in any context. + // Typical usage of user_defined_symbols is placeholder for named entities. + UserDefinedSymbols []string `protobuf:"bytes,31,rep,name=user_defined_symbols,json=userDefinedSymbols" json:"user_defined_symbols,omitempty"` + // Defines required characters. Each UTF8 character in this string is included + // in the character set regardless of character_coverage value. Unlike + // user_defined_symbols, these characters have scores based on the frequency + // on input sentences, and the model can form subwords using characters + // in this field. + RequiredChars *string `protobuf:"bytes,36,opt,name=required_chars,json=requiredChars" json:"required_chars,omitempty"` + // Decomposes unknown pieces into UTF-8 bytes. + ByteFallback *bool `protobuf:"varint,35,opt,name=byte_fallback,json=byteFallback,def=0" json:"byte_fallback,omitempty"` + // When creating the vocabulary file, defines whether or not to additionally + // output the score for each piece. + VocabularyOutputPieceScore *bool `protobuf:"varint,32,opt,name=vocabulary_output_piece_score,json=vocabularyOutputPieceScore,def=1" json:"vocabulary_output_piece_score,omitempty"` + // `vocab_size` is treated as hard limit. Crash if + // the model can not produce the vocab of size `vocab_size`, + // When `hard_vocab_limit` is false, vocab_size is treated + // as soft limit. Note that when model_type=char, + // always assumes hard_vocab_limit = false. + HardVocabLimit *bool `protobuf:"varint,33,opt,name=hard_vocab_limit,json=hardVocabLimit,def=1" json:"hard_vocab_limit,omitempty"` + // use all symbols for vocab extraction. This flag is valid + // if model type is either CHAR or WORD + UseAllVocab *bool `protobuf:"varint,34,opt,name=use_all_vocab,json=useAllVocab,def=0" json:"use_all_vocab,omitempty"` + // ///////////////////////////////////////////////////////////////// + // Reserved special meta tokens. + // * -1 is not used. + // * unk_id must not be -1. + // Id must start with 0 and be contiguous. + UnkId *int32 `protobuf:"varint,40,opt,name=unk_id,json=unkId,def=0" json:"unk_id,omitempty"` // + BosId *int32 `protobuf:"varint,41,opt,name=bos_id,json=bosId,def=1" json:"bos_id,omitempty"` // + EosId *int32 `protobuf:"varint,42,opt,name=eos_id,json=eosId,def=2" json:"eos_id,omitempty"` // + PadId *int32 `protobuf:"varint,43,opt,name=pad_id,json=padId,def=-1" json:"pad_id,omitempty"` // (padding) + UnkPiece *string `protobuf:"bytes,45,opt,name=unk_piece,json=unkPiece,def=" json:"unk_piece,omitempty"` + BosPiece *string `protobuf:"bytes,46,opt,name=bos_piece,json=bosPiece,def=" json:"bos_piece,omitempty"` + EosPiece *string `protobuf:"bytes,47,opt,name=eos_piece,json=eosPiece,def=" json:"eos_piece,omitempty"` + PadPiece *string `protobuf:"bytes,48,opt,name=pad_piece,json=padPiece,def=" json:"pad_piece,omitempty"` + // Encodes into U+2047 (DOUBLE QUESTION MARK), + // since this character can be useful both for user and + // developer. We can easily figure out that is emitted. + UnkSurface *string `protobuf:"bytes,44,opt,name=unk_surface,json=unkSurface,def= ⁇ " json:"unk_surface,omitempty"` + // Increase bit depth to allow unigram model training on large + // (>10M sentences) corpora. A Side-effect of enabling this flag + // is increased memory usage. 
+ TrainExtremelyLargeCorpus *bool `protobuf:"varint,49,opt,name=train_extremely_large_corpus,json=trainExtremelyLargeCorpus,def=0" json:"train_extremely_large_corpus,omitempty"` + // Path to a seed sentencepieces file, with one tab-separated + // seed sentencepiece frequency per line. + SeedSentencepiecesFile *string `protobuf:"bytes,54,opt,name=seed_sentencepieces_file,json=seedSentencepiecesFile,def=" json:"seed_sentencepieces_file,omitempty"` +} + +// Default values for TrainerSpec fields. +const ( + Default_TrainerSpec_ModelType = TrainerSpec_UNIGRAM + Default_TrainerSpec_VocabSize = int32(8000) + Default_TrainerSpec_SelfTestSampleSize = int32(0) + Default_TrainerSpec_EnableDifferentialPrivacy = bool(false) + Default_TrainerSpec_DifferentialPrivacyNoiseLevel = float32(0) + Default_TrainerSpec_DifferentialPrivacyClippingThreshold = uint64(0) + Default_TrainerSpec_CharacterCoverage = float32(0.9994999766349792) + Default_TrainerSpec_InputSentenceSize = uint64(0) + Default_TrainerSpec_ShuffleInputSentence = bool(true) + Default_TrainerSpec_SeedSentencepieceSize = int32(1000000) + Default_TrainerSpec_ShrinkingFactor = float32(0.75) + Default_TrainerSpec_MaxSentenceLength = int32(4192) + Default_TrainerSpec_NumThreads = int32(16) + Default_TrainerSpec_NumSubIterations = int32(2) + Default_TrainerSpec_MaxSentencepieceLength = int32(16) + Default_TrainerSpec_SplitByUnicodeScript = bool(true) + Default_TrainerSpec_SplitByNumber = bool(true) + Default_TrainerSpec_SplitByWhitespace = bool(true) + Default_TrainerSpec_TreatWhitespaceAsSuffix = bool(false) + Default_TrainerSpec_AllowWhitespaceOnlyPieces = bool(false) + Default_TrainerSpec_SplitDigits = bool(false) + Default_TrainerSpec_PretokenizationDelimiter = string("") + Default_TrainerSpec_ByteFallback = bool(false) + Default_TrainerSpec_VocabularyOutputPieceScore = bool(true) + Default_TrainerSpec_HardVocabLimit = bool(true) + Default_TrainerSpec_UseAllVocab = bool(false) + Default_TrainerSpec_UnkId = int32(0) + Default_TrainerSpec_BosId = int32(1) + Default_TrainerSpec_EosId = int32(2) + Default_TrainerSpec_PadId = int32(-1) + Default_TrainerSpec_UnkPiece = string("") + Default_TrainerSpec_BosPiece = string("") + Default_TrainerSpec_EosPiece = string("") + Default_TrainerSpec_PadPiece = string("") + Default_TrainerSpec_UnkSurface = string(" ⁇ ") + Default_TrainerSpec_TrainExtremelyLargeCorpus = bool(false) + Default_TrainerSpec_SeedSentencepiecesFile = string("") +) + +func (x *TrainerSpec) Reset() { + *x = TrainerSpec{} + if protoimpl.UnsafeEnabled { + mi := &file_sentencepiece_model_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *TrainerSpec) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*TrainerSpec) ProtoMessage() {} + +func (x *TrainerSpec) ProtoReflect() protoreflect.Message { + mi := &file_sentencepiece_model_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use TrainerSpec.ProtoReflect.Descriptor instead. 
+func (*TrainerSpec) Descriptor() ([]byte, []int) { + return file_sentencepiece_model_proto_rawDescGZIP(), []int{0} +} + +func (x *TrainerSpec) GetInput() []string { + if x != nil { + return x.Input + } + return nil +} + +func (x *TrainerSpec) GetInputFormat() string { + if x != nil && x.InputFormat != nil { + return *x.InputFormat + } + return "" +} + +func (x *TrainerSpec) GetModelPrefix() string { + if x != nil && x.ModelPrefix != nil { + return *x.ModelPrefix + } + return "" +} + +func (x *TrainerSpec) GetModelType() TrainerSpec_ModelType { + if x != nil && x.ModelType != nil { + return *x.ModelType + } + return Default_TrainerSpec_ModelType +} + +func (x *TrainerSpec) GetVocabSize() int32 { + if x != nil && x.VocabSize != nil { + return *x.VocabSize + } + return Default_TrainerSpec_VocabSize +} + +func (x *TrainerSpec) GetAcceptLanguage() []string { + if x != nil { + return x.AcceptLanguage + } + return nil +} + +func (x *TrainerSpec) GetSelfTestSampleSize() int32 { + if x != nil && x.SelfTestSampleSize != nil { + return *x.SelfTestSampleSize + } + return Default_TrainerSpec_SelfTestSampleSize +} + +func (x *TrainerSpec) GetEnableDifferentialPrivacy() bool { + if x != nil && x.EnableDifferentialPrivacy != nil { + return *x.EnableDifferentialPrivacy + } + return Default_TrainerSpec_EnableDifferentialPrivacy +} + +func (x *TrainerSpec) GetDifferentialPrivacyNoiseLevel() float32 { + if x != nil && x.DifferentialPrivacyNoiseLevel != nil { + return *x.DifferentialPrivacyNoiseLevel + } + return Default_TrainerSpec_DifferentialPrivacyNoiseLevel +} + +func (x *TrainerSpec) GetDifferentialPrivacyClippingThreshold() uint64 { + if x != nil && x.DifferentialPrivacyClippingThreshold != nil { + return *x.DifferentialPrivacyClippingThreshold + } + return Default_TrainerSpec_DifferentialPrivacyClippingThreshold +} + +func (x *TrainerSpec) GetCharacterCoverage() float32 { + if x != nil && x.CharacterCoverage != nil { + return *x.CharacterCoverage + } + return Default_TrainerSpec_CharacterCoverage +} + +func (x *TrainerSpec) GetInputSentenceSize() uint64 { + if x != nil && x.InputSentenceSize != nil { + return *x.InputSentenceSize + } + return Default_TrainerSpec_InputSentenceSize +} + +func (x *TrainerSpec) GetShuffleInputSentence() bool { + if x != nil && x.ShuffleInputSentence != nil { + return *x.ShuffleInputSentence + } + return Default_TrainerSpec_ShuffleInputSentence +} + +// Deprecated: Marked as deprecated in sentencepiece_model.proto. +func (x *TrainerSpec) GetMiningSentenceSize() int32 { + if x != nil && x.MiningSentenceSize != nil { + return *x.MiningSentenceSize + } + return 0 +} + +// Deprecated: Marked as deprecated in sentencepiece_model.proto. 
+func (x *TrainerSpec) GetTrainingSentenceSize() int32 { + if x != nil && x.TrainingSentenceSize != nil { + return *x.TrainingSentenceSize + } + return 0 +} + +func (x *TrainerSpec) GetSeedSentencepieceSize() int32 { + if x != nil && x.SeedSentencepieceSize != nil { + return *x.SeedSentencepieceSize + } + return Default_TrainerSpec_SeedSentencepieceSize +} + +func (x *TrainerSpec) GetShrinkingFactor() float32 { + if x != nil && x.ShrinkingFactor != nil { + return *x.ShrinkingFactor + } + return Default_TrainerSpec_ShrinkingFactor +} + +func (x *TrainerSpec) GetMaxSentenceLength() int32 { + if x != nil && x.MaxSentenceLength != nil { + return *x.MaxSentenceLength + } + return Default_TrainerSpec_MaxSentenceLength +} + +func (x *TrainerSpec) GetNumThreads() int32 { + if x != nil && x.NumThreads != nil { + return *x.NumThreads + } + return Default_TrainerSpec_NumThreads +} + +func (x *TrainerSpec) GetNumSubIterations() int32 { + if x != nil && x.NumSubIterations != nil { + return *x.NumSubIterations + } + return Default_TrainerSpec_NumSubIterations +} + +func (x *TrainerSpec) GetMaxSentencepieceLength() int32 { + if x != nil && x.MaxSentencepieceLength != nil { + return *x.MaxSentencepieceLength + } + return Default_TrainerSpec_MaxSentencepieceLength +} + +func (x *TrainerSpec) GetSplitByUnicodeScript() bool { + if x != nil && x.SplitByUnicodeScript != nil { + return *x.SplitByUnicodeScript + } + return Default_TrainerSpec_SplitByUnicodeScript +} + +func (x *TrainerSpec) GetSplitByNumber() bool { + if x != nil && x.SplitByNumber != nil { + return *x.SplitByNumber + } + return Default_TrainerSpec_SplitByNumber +} + +func (x *TrainerSpec) GetSplitByWhitespace() bool { + if x != nil && x.SplitByWhitespace != nil { + return *x.SplitByWhitespace + } + return Default_TrainerSpec_SplitByWhitespace +} + +func (x *TrainerSpec) GetTreatWhitespaceAsSuffix() bool { + if x != nil && x.TreatWhitespaceAsSuffix != nil { + return *x.TreatWhitespaceAsSuffix + } + return Default_TrainerSpec_TreatWhitespaceAsSuffix +} + +func (x *TrainerSpec) GetAllowWhitespaceOnlyPieces() bool { + if x != nil && x.AllowWhitespaceOnlyPieces != nil { + return *x.AllowWhitespaceOnlyPieces + } + return Default_TrainerSpec_AllowWhitespaceOnlyPieces +} + +func (x *TrainerSpec) GetSplitDigits() bool { + if x != nil && x.SplitDigits != nil { + return *x.SplitDigits + } + return Default_TrainerSpec_SplitDigits +} + +func (x *TrainerSpec) GetPretokenizationDelimiter() string { + if x != nil && x.PretokenizationDelimiter != nil { + return *x.PretokenizationDelimiter + } + return Default_TrainerSpec_PretokenizationDelimiter +} + +func (x *TrainerSpec) GetControlSymbols() []string { + if x != nil { + return x.ControlSymbols + } + return nil +} + +func (x *TrainerSpec) GetUserDefinedSymbols() []string { + if x != nil { + return x.UserDefinedSymbols + } + return nil +} + +func (x *TrainerSpec) GetRequiredChars() string { + if x != nil && x.RequiredChars != nil { + return *x.RequiredChars + } + return "" +} + +func (x *TrainerSpec) GetByteFallback() bool { + if x != nil && x.ByteFallback != nil { + return *x.ByteFallback + } + return Default_TrainerSpec_ByteFallback +} + +func (x *TrainerSpec) GetVocabularyOutputPieceScore() bool { + if x != nil && x.VocabularyOutputPieceScore != nil { + return *x.VocabularyOutputPieceScore + } + return Default_TrainerSpec_VocabularyOutputPieceScore +} + +func (x *TrainerSpec) GetHardVocabLimit() bool { + if x != nil && x.HardVocabLimit != nil { + return *x.HardVocabLimit + } + return 
Default_TrainerSpec_HardVocabLimit +} + +func (x *TrainerSpec) GetUseAllVocab() bool { + if x != nil && x.UseAllVocab != nil { + return *x.UseAllVocab + } + return Default_TrainerSpec_UseAllVocab +} + +func (x *TrainerSpec) GetUnkId() int32 { + if x != nil && x.UnkId != nil { + return *x.UnkId + } + return Default_TrainerSpec_UnkId +} + +func (x *TrainerSpec) GetBosId() int32 { + if x != nil && x.BosId != nil { + return *x.BosId + } + return Default_TrainerSpec_BosId +} + +func (x *TrainerSpec) GetEosId() int32 { + if x != nil && x.EosId != nil { + return *x.EosId + } + return Default_TrainerSpec_EosId +} + +func (x *TrainerSpec) GetPadId() int32 { + if x != nil && x.PadId != nil { + return *x.PadId + } + return Default_TrainerSpec_PadId +} + +func (x *TrainerSpec) GetUnkPiece() string { + if x != nil && x.UnkPiece != nil { + return *x.UnkPiece + } + return Default_TrainerSpec_UnkPiece +} + +func (x *TrainerSpec) GetBosPiece() string { + if x != nil && x.BosPiece != nil { + return *x.BosPiece + } + return Default_TrainerSpec_BosPiece +} + +func (x *TrainerSpec) GetEosPiece() string { + if x != nil && x.EosPiece != nil { + return *x.EosPiece + } + return Default_TrainerSpec_EosPiece +} + +func (x *TrainerSpec) GetPadPiece() string { + if x != nil && x.PadPiece != nil { + return *x.PadPiece + } + return Default_TrainerSpec_PadPiece +} + +func (x *TrainerSpec) GetUnkSurface() string { + if x != nil && x.UnkSurface != nil { + return *x.UnkSurface + } + return Default_TrainerSpec_UnkSurface +} + +func (x *TrainerSpec) GetTrainExtremelyLargeCorpus() bool { + if x != nil && x.TrainExtremelyLargeCorpus != nil { + return *x.TrainExtremelyLargeCorpus + } + return Default_TrainerSpec_TrainExtremelyLargeCorpus +} + +func (x *TrainerSpec) GetSeedSentencepiecesFile() string { + if x != nil && x.SeedSentencepiecesFile != nil { + return *x.SeedSentencepiecesFile + } + return Default_TrainerSpec_SeedSentencepiecesFile +} + +// NormalizerSpec encodes a various parameters for string normalizaiton +type NormalizerSpec struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + extensionFields protoimpl.ExtensionFields + + // name of normalization rule. + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Pre-compiled normalization rule created by + // Builder::GetPrecompiledCharsMap() or Builder::CompileCharsMap() method. + // Usually this field is set by Builder::GetNormalizerSpec() method. + PrecompiledCharsmap []byte `protobuf:"bytes,2,opt,name=precompiled_charsmap,json=precompiledCharsmap" json:"precompiled_charsmap,omitempty"` + // Adds dummy whitespace at the beginning of text in order to + // treat "world" in "world" and "hello world" in the same way. + AddDummyPrefix *bool `protobuf:"varint,3,opt,name=add_dummy_prefix,json=addDummyPrefix,def=1" json:"add_dummy_prefix,omitempty"` + // Removes leading, trailing, and duplicate internal whitespace. + RemoveExtraWhitespaces *bool `protobuf:"varint,4,opt,name=remove_extra_whitespaces,json=removeExtraWhitespaces,def=1" json:"remove_extra_whitespaces,omitempty"` + // Replaces whitespace with meta symbol. + // This field must be true to train sentence piece model. + EscapeWhitespaces *bool `protobuf:"varint,5,opt,name=escape_whitespaces,json=escapeWhitespaces,def=1" json:"escape_whitespaces,omitempty"` + // Custom normalization rule file in TSV format. 
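+ // As an illustrative sketch of that format (assumed from the normalization
+ // doc linked below, not defined in this repository): each rule line maps
+ // space-separated source code points to target code points, e.g.
+ //   41 302 300 <tab> 1EA6   (A + circumflex + grave is folded into U+1EA6)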
+ // https://github.com/google/sentencepiece/blob/master/doc/normalization.md + // This field is only used in SentencePieceTrainer::Train() method, which + // compiles the rule into the binary rule stored in `precompiled_charsmap`. + NormalizationRuleTsv *string `protobuf:"bytes,6,opt,name=normalization_rule_tsv,json=normalizationRuleTsv" json:"normalization_rule_tsv,omitempty"` +} + +// Default values for NormalizerSpec fields. +const ( + Default_NormalizerSpec_AddDummyPrefix = bool(true) + Default_NormalizerSpec_RemoveExtraWhitespaces = bool(true) + Default_NormalizerSpec_EscapeWhitespaces = bool(true) +) + +func (x *NormalizerSpec) Reset() { + *x = NormalizerSpec{} + if protoimpl.UnsafeEnabled { + mi := &file_sentencepiece_model_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *NormalizerSpec) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NormalizerSpec) ProtoMessage() {} + +func (x *NormalizerSpec) ProtoReflect() protoreflect.Message { + mi := &file_sentencepiece_model_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NormalizerSpec.ProtoReflect.Descriptor instead. +func (*NormalizerSpec) Descriptor() ([]byte, []int) { + return file_sentencepiece_model_proto_rawDescGZIP(), []int{1} +} + +func (x *NormalizerSpec) GetName() string { + if x != nil && x.Name != nil { + return *x.Name + } + return "" +} + +func (x *NormalizerSpec) GetPrecompiledCharsmap() []byte { + if x != nil { + return x.PrecompiledCharsmap + } + return nil +} + +func (x *NormalizerSpec) GetAddDummyPrefix() bool { + if x != nil && x.AddDummyPrefix != nil { + return *x.AddDummyPrefix + } + return Default_NormalizerSpec_AddDummyPrefix +} + +func (x *NormalizerSpec) GetRemoveExtraWhitespaces() bool { + if x != nil && x.RemoveExtraWhitespaces != nil { + return *x.RemoveExtraWhitespaces + } + return Default_NormalizerSpec_RemoveExtraWhitespaces +} + +func (x *NormalizerSpec) GetEscapeWhitespaces() bool { + if x != nil && x.EscapeWhitespaces != nil { + return *x.EscapeWhitespaces + } + return Default_NormalizerSpec_EscapeWhitespaces +} + +func (x *NormalizerSpec) GetNormalizationRuleTsv() string { + if x != nil && x.NormalizationRuleTsv != nil { + return *x.NormalizationRuleTsv + } + return "" +} + +// Proto to store samples for self-testing. 
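+// Each sample pairs a raw input sentence with its expected segmentation; by
+// convention the expected field holds the space-joined pieces (for example,
+// input "hello world" with expected "▁hello ▁world", an illustrative pair
+// rather than one taken from a shipped model).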
+type SelfTestData struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + extensionFields protoimpl.ExtensionFields + + Samples []*SelfTestData_Sample `protobuf:"bytes,1,rep,name=samples" json:"samples,omitempty"` +} + +func (x *SelfTestData) Reset() { + *x = SelfTestData{} + if protoimpl.UnsafeEnabled { + mi := &file_sentencepiece_model_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SelfTestData) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SelfTestData) ProtoMessage() {} + +func (x *SelfTestData) ProtoReflect() protoreflect.Message { + mi := &file_sentencepiece_model_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SelfTestData.ProtoReflect.Descriptor instead. +func (*SelfTestData) Descriptor() ([]byte, []int) { + return file_sentencepiece_model_proto_rawDescGZIP(), []int{2} +} + +func (x *SelfTestData) GetSamples() []*SelfTestData_Sample { + if x != nil { + return x.Samples + } + return nil +} + +// ModelProto stores model parameters. +// SentencePieceProcessor is supposed to be self-contained. +// All settings/parameters which may change the behavior must be encoded +// in ModelProto. +type ModelProto struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + extensionFields protoimpl.ExtensionFields + + // Sentence pieces with scores. + Pieces []*ModelProto_SentencePiece `protobuf:"bytes,1,rep,name=pieces" json:"pieces,omitempty"` + // Spec used to generate this model file. + TrainerSpec *TrainerSpec `protobuf:"bytes,2,opt,name=trainer_spec,json=trainerSpec" json:"trainer_spec,omitempty"` + // Spec for text normalization. + NormalizerSpec *NormalizerSpec `protobuf:"bytes,3,opt,name=normalizer_spec,json=normalizerSpec" json:"normalizer_spec,omitempty"` + // Stores sample input and its expected segmentation to verify the model. + SelfTestData *SelfTestData `protobuf:"bytes,4,opt,name=self_test_data,json=selfTestData" json:"self_test_data,omitempty"` + // Spec for text de-normalization. + DenormalizerSpec *NormalizerSpec `protobuf:"bytes,5,opt,name=denormalizer_spec,json=denormalizerSpec" json:"denormalizer_spec,omitempty"` +} + +func (x *ModelProto) Reset() { + *x = ModelProto{} + if protoimpl.UnsafeEnabled { + mi := &file_sentencepiece_model_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelProto) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelProto) ProtoMessage() {} + +func (x *ModelProto) ProtoReflect() protoreflect.Message { + mi := &file_sentencepiece_model_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelProto.ProtoReflect.Descriptor instead. 
+func (*ModelProto) Descriptor() ([]byte, []int) { + return file_sentencepiece_model_proto_rawDescGZIP(), []int{3} +} + +func (x *ModelProto) GetPieces() []*ModelProto_SentencePiece { + if x != nil { + return x.Pieces + } + return nil +} + +func (x *ModelProto) GetTrainerSpec() *TrainerSpec { + if x != nil { + return x.TrainerSpec + } + return nil +} + +func (x *ModelProto) GetNormalizerSpec() *NormalizerSpec { + if x != nil { + return x.NormalizerSpec + } + return nil +} + +func (x *ModelProto) GetSelfTestData() *SelfTestData { + if x != nil { + return x.SelfTestData + } + return nil +} + +func (x *ModelProto) GetDenormalizerSpec() *NormalizerSpec { + if x != nil { + return x.DenormalizerSpec + } + return nil +} + +type SelfTestData_Sample struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Input *string `protobuf:"bytes,1,opt,name=input" json:"input,omitempty"` + Expected *string `protobuf:"bytes,2,opt,name=expected" json:"expected,omitempty"` +} + +func (x *SelfTestData_Sample) Reset() { + *x = SelfTestData_Sample{} + if protoimpl.UnsafeEnabled { + mi := &file_sentencepiece_model_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SelfTestData_Sample) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SelfTestData_Sample) ProtoMessage() {} + +func (x *SelfTestData_Sample) ProtoReflect() protoreflect.Message { + mi := &file_sentencepiece_model_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SelfTestData_Sample.ProtoReflect.Descriptor instead. +func (*SelfTestData_Sample) Descriptor() ([]byte, []int) { + return file_sentencepiece_model_proto_rawDescGZIP(), []int{2, 0} +} + +func (x *SelfTestData_Sample) GetInput() string { + if x != nil && x.Input != nil { + return *x.Input + } + return "" +} + +func (x *SelfTestData_Sample) GetExpected() string { + if x != nil && x.Expected != nil { + return *x.Expected + } + return "" +} + +type ModelProto_SentencePiece struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + extensionFields protoimpl.ExtensionFields + + Piece *string `protobuf:"bytes,1,opt,name=piece" json:"piece,omitempty"` // piece must not be empty. + Score *float32 `protobuf:"fixed32,2,opt,name=score" json:"score,omitempty"` + Type *ModelProto_SentencePiece_Type `protobuf:"varint,3,opt,name=type,enum=sentencepiece.ModelProto_SentencePiece_Type,def=1" json:"type,omitempty"` +} + +// Default values for ModelProto_SentencePiece fields. 
+const ( + Default_ModelProto_SentencePiece_Type = ModelProto_SentencePiece_NORMAL +) + +func (x *ModelProto_SentencePiece) Reset() { + *x = ModelProto_SentencePiece{} + if protoimpl.UnsafeEnabled { + mi := &file_sentencepiece_model_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelProto_SentencePiece) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelProto_SentencePiece) ProtoMessage() {} + +func (x *ModelProto_SentencePiece) ProtoReflect() protoreflect.Message { + mi := &file_sentencepiece_model_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelProto_SentencePiece.ProtoReflect.Descriptor instead. +func (*ModelProto_SentencePiece) Descriptor() ([]byte, []int) { + return file_sentencepiece_model_proto_rawDescGZIP(), []int{3, 0} +} + +func (x *ModelProto_SentencePiece) GetPiece() string { + if x != nil && x.Piece != nil { + return *x.Piece + } + return "" +} + +func (x *ModelProto_SentencePiece) GetScore() float32 { + if x != nil && x.Score != nil { + return *x.Score + } + return 0 +} + +func (x *ModelProto_SentencePiece) GetType() ModelProto_SentencePiece_Type { + if x != nil && x.Type != nil { + return *x.Type + } + return Default_ModelProto_SentencePiece_Type +} + +var File_sentencepiece_model_proto protoreflect.FileDescriptor + +var file_sentencepiece_model_proto_rawDesc = []byte{ + 0x0a, 0x19, 0x73, 0x65, 0x6e, 0x74, 0x65, 0x6e, 0x63, 0x65, 0x70, 0x69, 0x65, 0x63, 0x65, 0x5f, + 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0d, 0x73, 0x65, 0x6e, + 0x74, 0x65, 0x6e, 0x63, 0x65, 0x70, 0x69, 0x65, 0x63, 0x65, 0x22, 0xc6, 0x12, 0x0a, 0x0b, 0x54, + 0x72, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x12, 0x14, 0x0a, 0x05, 0x69, 0x6e, + 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, + 0x12, 0x21, 0x0a, 0x0c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, + 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x46, 0x6f, 0x72, + 0x6d, 0x61, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x5f, 0x70, 0x72, 0x65, + 0x66, 0x69, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x6d, 0x6f, 0x64, 0x65, 0x6c, + 0x50, 0x72, 0x65, 0x66, 0x69, 0x78, 0x12, 0x4c, 0x0a, 0x0a, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x5f, + 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x24, 0x2e, 0x73, 0x65, 0x6e, + 0x74, 0x65, 0x6e, 0x63, 0x65, 0x70, 0x69, 0x65, 0x63, 0x65, 0x2e, 0x54, 0x72, 0x61, 0x69, 0x6e, + 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x54, 0x79, 0x70, 0x65, + 0x3a, 0x07, 0x55, 0x4e, 0x49, 0x47, 0x52, 0x41, 0x4d, 0x52, 0x09, 0x6d, 0x6f, 0x64, 0x65, 0x6c, + 0x54, 0x79, 0x70, 0x65, 0x12, 0x23, 0x0a, 0x0a, 0x76, 0x6f, 0x63, 0x61, 0x62, 0x5f, 0x73, 0x69, + 0x7a, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x3a, 0x04, 0x38, 0x30, 0x30, 0x30, 0x52, 0x09, + 0x76, 0x6f, 0x63, 0x61, 0x62, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x27, 0x0a, 0x0f, 0x61, 0x63, 0x63, + 0x65, 0x70, 0x74, 0x5f, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x18, 0x05, 0x20, 0x03, + 0x28, 0x09, 0x52, 0x0e, 0x61, 0x63, 0x63, 0x65, 0x70, 0x74, 0x4c, 0x61, 0x6e, 0x67, 0x75, 0x61, + 0x67, 0x65, 0x12, 0x34, 0x0a, 0x15, 0x73, 0x65, 0x6c, 0x66, 0x5f, 0x74, 0x65, 
0x73, 0x74, 0x5f, + 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, + 0x05, 0x3a, 0x01, 0x30, 0x52, 0x12, 0x73, 0x65, 0x6c, 0x66, 0x54, 0x65, 0x73, 0x74, 0x53, 0x61, + 0x6d, 0x70, 0x6c, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x45, 0x0a, 0x1b, 0x65, 0x6e, 0x61, 0x62, + 0x6c, 0x65, 0x5f, 0x64, 0x69, 0x66, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, + 0x70, 0x72, 0x69, 0x76, 0x61, 0x63, 0x79, 0x18, 0x32, 0x20, 0x01, 0x28, 0x08, 0x3a, 0x05, 0x66, + 0x61, 0x6c, 0x73, 0x65, 0x52, 0x19, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x44, 0x69, 0x66, 0x66, + 0x65, 0x72, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x50, 0x72, 0x69, 0x76, 0x61, 0x63, 0x79, 0x12, + 0x4a, 0x0a, 0x20, 0x64, 0x69, 0x66, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, + 0x70, 0x72, 0x69, 0x76, 0x61, 0x63, 0x79, 0x5f, 0x6e, 0x6f, 0x69, 0x73, 0x65, 0x5f, 0x6c, 0x65, + 0x76, 0x65, 0x6c, 0x18, 0x33, 0x20, 0x01, 0x28, 0x02, 0x3a, 0x01, 0x30, 0x52, 0x1d, 0x64, 0x69, + 0x66, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x50, 0x72, 0x69, 0x76, 0x61, 0x63, + 0x79, 0x4e, 0x6f, 0x69, 0x73, 0x65, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x58, 0x0a, 0x27, 0x64, + 0x69, 0x66, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x70, 0x72, 0x69, 0x76, + 0x61, 0x63, 0x79, 0x5f, 0x63, 0x6c, 0x69, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x5f, 0x74, 0x68, 0x72, + 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x18, 0x34, 0x20, 0x01, 0x28, 0x04, 0x3a, 0x01, 0x30, 0x52, + 0x24, 0x64, 0x69, 0x66, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x50, 0x72, 0x69, + 0x76, 0x61, 0x63, 0x79, 0x43, 0x6c, 0x69, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x54, 0x68, 0x72, 0x65, + 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x12, 0x35, 0x0a, 0x12, 0x63, 0x68, 0x61, 0x72, 0x61, 0x63, 0x74, + 0x65, 0x72, 0x5f, 0x63, 0x6f, 0x76, 0x65, 0x72, 0x61, 0x67, 0x65, 0x18, 0x0a, 0x20, 0x01, 0x28, + 0x02, 0x3a, 0x06, 0x30, 0x2e, 0x39, 0x39, 0x39, 0x35, 0x52, 0x11, 0x63, 0x68, 0x61, 0x72, 0x61, + 0x63, 0x74, 0x65, 0x72, 0x43, 0x6f, 0x76, 0x65, 0x72, 0x61, 0x67, 0x65, 0x12, 0x31, 0x0a, 0x13, + 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x73, 0x65, 0x6e, 0x74, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x73, + 0x69, 0x7a, 0x65, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x04, 0x3a, 0x01, 0x30, 0x52, 0x11, 0x69, 0x6e, + 0x70, 0x75, 0x74, 0x53, 0x65, 0x6e, 0x74, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x12, + 0x3a, 0x0a, 0x16, 0x73, 0x68, 0x75, 0x66, 0x66, 0x6c, 0x65, 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, + 0x5f, 0x73, 0x65, 0x6e, 0x74, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x13, 0x20, 0x01, 0x28, 0x08, 0x3a, + 0x04, 0x74, 0x72, 0x75, 0x65, 0x52, 0x14, 0x73, 0x68, 0x75, 0x66, 0x66, 0x6c, 0x65, 0x49, 0x6e, + 0x70, 0x75, 0x74, 0x53, 0x65, 0x6e, 0x74, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x34, 0x0a, 0x14, 0x6d, + 0x69, 0x6e, 0x69, 0x6e, 0x67, 0x5f, 0x73, 0x65, 0x6e, 0x74, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x73, + 0x69, 0x7a, 0x65, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x05, 0x42, 0x02, 0x18, 0x01, 0x52, 0x12, 0x6d, + 0x69, 0x6e, 0x69, 0x6e, 0x67, 0x53, 0x65, 0x6e, 0x74, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x69, 0x7a, + 0x65, 0x12, 0x38, 0x0a, 0x16, 0x74, 0x72, 0x61, 0x69, 0x6e, 0x69, 0x6e, 0x67, 0x5f, 0x73, 0x65, + 0x6e, 0x74, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x0d, 0x20, 0x01, 0x28, + 0x05, 0x42, 0x02, 0x18, 0x01, 0x52, 0x14, 0x74, 0x72, 0x61, 0x69, 0x6e, 0x69, 0x6e, 0x67, 0x53, + 0x65, 0x6e, 0x74, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x3f, 0x0a, 0x17, 0x73, + 0x65, 0x65, 0x64, 0x5f, 0x73, 0x65, 0x6e, 0x74, 0x65, 0x6e, 0x63, 0x65, 0x70, 0x69, 0x65, 0x63, + 0x65, 
0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x05, 0x3a, 0x07, 0x31, 0x30, + 0x30, 0x30, 0x30, 0x30, 0x30, 0x52, 0x15, 0x73, 0x65, 0x65, 0x64, 0x53, 0x65, 0x6e, 0x74, 0x65, + 0x6e, 0x63, 0x65, 0x70, 0x69, 0x65, 0x63, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x2f, 0x0a, 0x10, + 0x73, 0x68, 0x72, 0x69, 0x6e, 0x6b, 0x69, 0x6e, 0x67, 0x5f, 0x66, 0x61, 0x63, 0x74, 0x6f, 0x72, + 0x18, 0x0f, 0x20, 0x01, 0x28, 0x02, 0x3a, 0x04, 0x30, 0x2e, 0x37, 0x35, 0x52, 0x0f, 0x73, 0x68, + 0x72, 0x69, 0x6e, 0x6b, 0x69, 0x6e, 0x67, 0x46, 0x61, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x34, 0x0a, + 0x13, 0x6d, 0x61, 0x78, 0x5f, 0x73, 0x65, 0x6e, 0x74, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x6c, 0x65, + 0x6e, 0x67, 0x74, 0x68, 0x18, 0x12, 0x20, 0x01, 0x28, 0x05, 0x3a, 0x04, 0x34, 0x31, 0x39, 0x32, + 0x52, 0x11, 0x6d, 0x61, 0x78, 0x53, 0x65, 0x6e, 0x74, 0x65, 0x6e, 0x63, 0x65, 0x4c, 0x65, 0x6e, + 0x67, 0x74, 0x68, 0x12, 0x23, 0x0a, 0x0b, 0x6e, 0x75, 0x6d, 0x5f, 0x74, 0x68, 0x72, 0x65, 0x61, + 0x64, 0x73, 0x18, 0x10, 0x20, 0x01, 0x28, 0x05, 0x3a, 0x02, 0x31, 0x36, 0x52, 0x0a, 0x6e, 0x75, + 0x6d, 0x54, 0x68, 0x72, 0x65, 0x61, 0x64, 0x73, 0x12, 0x2f, 0x0a, 0x12, 0x6e, 0x75, 0x6d, 0x5f, + 0x73, 0x75, 0x62, 0x5f, 0x69, 0x74, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x11, + 0x20, 0x01, 0x28, 0x05, 0x3a, 0x01, 0x32, 0x52, 0x10, 0x6e, 0x75, 0x6d, 0x53, 0x75, 0x62, 0x49, + 0x74, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x3c, 0x0a, 0x18, 0x6d, 0x61, 0x78, + 0x5f, 0x73, 0x65, 0x6e, 0x74, 0x65, 0x6e, 0x63, 0x65, 0x70, 0x69, 0x65, 0x63, 0x65, 0x5f, 0x6c, + 0x65, 0x6e, 0x67, 0x74, 0x68, 0x18, 0x14, 0x20, 0x01, 0x28, 0x05, 0x3a, 0x02, 0x31, 0x36, 0x52, + 0x16, 0x6d, 0x61, 0x78, 0x53, 0x65, 0x6e, 0x74, 0x65, 0x6e, 0x63, 0x65, 0x70, 0x69, 0x65, 0x63, + 0x65, 0x4c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x12, 0x3b, 0x0a, 0x17, 0x73, 0x70, 0x6c, 0x69, 0x74, + 0x5f, 0x62, 0x79, 0x5f, 0x75, 0x6e, 0x69, 0x63, 0x6f, 0x64, 0x65, 0x5f, 0x73, 0x63, 0x72, 0x69, + 0x70, 0x74, 0x18, 0x15, 0x20, 0x01, 0x28, 0x08, 0x3a, 0x04, 0x74, 0x72, 0x75, 0x65, 0x52, 0x14, + 0x73, 0x70, 0x6c, 0x69, 0x74, 0x42, 0x79, 0x55, 0x6e, 0x69, 0x63, 0x6f, 0x64, 0x65, 0x53, 0x63, + 0x72, 0x69, 0x70, 0x74, 0x12, 0x2c, 0x0a, 0x0f, 0x73, 0x70, 0x6c, 0x69, 0x74, 0x5f, 0x62, 0x79, + 0x5f, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x18, 0x17, 0x20, 0x01, 0x28, 0x08, 0x3a, 0x04, 0x74, + 0x72, 0x75, 0x65, 0x52, 0x0d, 0x73, 0x70, 0x6c, 0x69, 0x74, 0x42, 0x79, 0x4e, 0x75, 0x6d, 0x62, + 0x65, 0x72, 0x12, 0x34, 0x0a, 0x13, 0x73, 0x70, 0x6c, 0x69, 0x74, 0x5f, 0x62, 0x79, 0x5f, 0x77, + 0x68, 0x69, 0x74, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x16, 0x20, 0x01, 0x28, 0x08, 0x3a, + 0x04, 0x74, 0x72, 0x75, 0x65, 0x52, 0x11, 0x73, 0x70, 0x6c, 0x69, 0x74, 0x42, 0x79, 0x57, 0x68, + 0x69, 0x74, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x42, 0x0a, 0x1a, 0x74, 0x72, 0x65, 0x61, + 0x74, 0x5f, 0x77, 0x68, 0x69, 0x74, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x61, 0x73, 0x5f, + 0x73, 0x75, 0x66, 0x66, 0x69, 0x78, 0x18, 0x18, 0x20, 0x01, 0x28, 0x08, 0x3a, 0x05, 0x66, 0x61, + 0x6c, 0x73, 0x65, 0x52, 0x17, 0x74, 0x72, 0x65, 0x61, 0x74, 0x57, 0x68, 0x69, 0x74, 0x65, 0x73, + 0x70, 0x61, 0x63, 0x65, 0x41, 0x73, 0x53, 0x75, 0x66, 0x66, 0x69, 0x78, 0x12, 0x46, 0x0a, 0x1c, + 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x5f, 0x77, 0x68, 0x69, 0x74, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, + 0x5f, 0x6f, 0x6e, 0x6c, 0x79, 0x5f, 0x70, 0x69, 0x65, 0x63, 0x65, 0x73, 0x18, 0x1a, 0x20, 0x01, + 0x28, 0x08, 0x3a, 0x05, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x52, 0x19, 0x61, 0x6c, 0x6c, 0x6f, 0x77, + 0x57, 0x68, 0x69, 0x74, 0x65, 
0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x6e, 0x6c, 0x79, 0x50, 0x69, + 0x65, 0x63, 0x65, 0x73, 0x12, 0x28, 0x0a, 0x0c, 0x73, 0x70, 0x6c, 0x69, 0x74, 0x5f, 0x64, 0x69, + 0x67, 0x69, 0x74, 0x73, 0x18, 0x19, 0x20, 0x01, 0x28, 0x08, 0x3a, 0x05, 0x66, 0x61, 0x6c, 0x73, + 0x65, 0x52, 0x0b, 0x73, 0x70, 0x6c, 0x69, 0x74, 0x44, 0x69, 0x67, 0x69, 0x74, 0x73, 0x12, 0x3d, + 0x0a, 0x19, 0x70, 0x72, 0x65, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x5f, 0x64, 0x65, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x65, 0x72, 0x18, 0x35, 0x20, 0x01, 0x28, + 0x09, 0x3a, 0x00, 0x52, 0x18, 0x70, 0x72, 0x65, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x69, 0x7a, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x65, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x65, 0x72, 0x12, 0x27, 0x0a, + 0x0f, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x5f, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x73, + 0x18, 0x1e, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0e, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x53, + 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x73, 0x12, 0x30, 0x0a, 0x14, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x64, + 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x5f, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x73, 0x18, 0x1f, + 0x20, 0x03, 0x28, 0x09, 0x52, 0x12, 0x75, 0x73, 0x65, 0x72, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x65, + 0x64, 0x53, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x73, 0x12, 0x25, 0x0a, 0x0e, 0x72, 0x65, 0x71, 0x75, + 0x69, 0x72, 0x65, 0x64, 0x5f, 0x63, 0x68, 0x61, 0x72, 0x73, 0x18, 0x24, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x0d, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x43, 0x68, 0x61, 0x72, 0x73, 0x12, + 0x2a, 0x0a, 0x0d, 0x62, 0x79, 0x74, 0x65, 0x5f, 0x66, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, + 0x18, 0x23, 0x20, 0x01, 0x28, 0x08, 0x3a, 0x05, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x52, 0x0c, 0x62, + 0x79, 0x74, 0x65, 0x46, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x12, 0x47, 0x0a, 0x1d, 0x76, + 0x6f, 0x63, 0x61, 0x62, 0x75, 0x6c, 0x61, 0x72, 0x79, 0x5f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, + 0x5f, 0x70, 0x69, 0x65, 0x63, 0x65, 0x5f, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x18, 0x20, 0x20, 0x01, + 0x28, 0x08, 0x3a, 0x04, 0x74, 0x72, 0x75, 0x65, 0x52, 0x1a, 0x76, 0x6f, 0x63, 0x61, 0x62, 0x75, + 0x6c, 0x61, 0x72, 0x79, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x50, 0x69, 0x65, 0x63, 0x65, 0x53, + 0x63, 0x6f, 0x72, 0x65, 0x12, 0x2e, 0x0a, 0x10, 0x68, 0x61, 0x72, 0x64, 0x5f, 0x76, 0x6f, 0x63, + 0x61, 0x62, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x21, 0x20, 0x01, 0x28, 0x08, 0x3a, 0x04, + 0x74, 0x72, 0x75, 0x65, 0x52, 0x0e, 0x68, 0x61, 0x72, 0x64, 0x56, 0x6f, 0x63, 0x61, 0x62, 0x4c, + 0x69, 0x6d, 0x69, 0x74, 0x12, 0x29, 0x0a, 0x0d, 0x75, 0x73, 0x65, 0x5f, 0x61, 0x6c, 0x6c, 0x5f, + 0x76, 0x6f, 0x63, 0x61, 0x62, 0x18, 0x22, 0x20, 0x01, 0x28, 0x08, 0x3a, 0x05, 0x66, 0x61, 0x6c, + 0x73, 0x65, 0x52, 0x0b, 0x75, 0x73, 0x65, 0x41, 0x6c, 0x6c, 0x56, 0x6f, 0x63, 0x61, 0x62, 0x12, + 0x18, 0x0a, 0x06, 0x75, 0x6e, 0x6b, 0x5f, 0x69, 0x64, 0x18, 0x28, 0x20, 0x01, 0x28, 0x05, 0x3a, + 0x01, 0x30, 0x52, 0x05, 0x75, 0x6e, 0x6b, 0x49, 0x64, 0x12, 0x18, 0x0a, 0x06, 0x62, 0x6f, 0x73, + 0x5f, 0x69, 0x64, 0x18, 0x29, 0x20, 0x01, 0x28, 0x05, 0x3a, 0x01, 0x31, 0x52, 0x05, 0x62, 0x6f, + 0x73, 0x49, 0x64, 0x12, 0x18, 0x0a, 0x06, 0x65, 0x6f, 0x73, 0x5f, 0x69, 0x64, 0x18, 0x2a, 0x20, + 0x01, 0x28, 0x05, 0x3a, 0x01, 0x32, 0x52, 0x05, 0x65, 0x6f, 0x73, 0x49, 0x64, 0x12, 0x19, 0x0a, + 0x06, 0x70, 0x61, 0x64, 0x5f, 0x69, 0x64, 0x18, 0x2b, 0x20, 0x01, 0x28, 0x05, 0x3a, 0x02, 0x2d, + 0x31, 0x52, 0x05, 0x70, 0x61, 0x64, 0x49, 0x64, 0x12, 0x22, 0x0a, 0x09, 0x75, 0x6e, 0x6b, 0x5f, + 0x70, 0x69, 0x65, 0x63, 0x65, 0x18, 0x2d, 0x20, 0x01, 
0x28, 0x09, 0x3a, 0x05, 0x3c, 0x75, 0x6e, + 0x6b, 0x3e, 0x52, 0x08, 0x75, 0x6e, 0x6b, 0x50, 0x69, 0x65, 0x63, 0x65, 0x12, 0x20, 0x0a, 0x09, + 0x62, 0x6f, 0x73, 0x5f, 0x70, 0x69, 0x65, 0x63, 0x65, 0x18, 0x2e, 0x20, 0x01, 0x28, 0x09, 0x3a, + 0x03, 0x3c, 0x73, 0x3e, 0x52, 0x08, 0x62, 0x6f, 0x73, 0x50, 0x69, 0x65, 0x63, 0x65, 0x12, 0x21, + 0x0a, 0x09, 0x65, 0x6f, 0x73, 0x5f, 0x70, 0x69, 0x65, 0x63, 0x65, 0x18, 0x2f, 0x20, 0x01, 0x28, + 0x09, 0x3a, 0x04, 0x3c, 0x2f, 0x73, 0x3e, 0x52, 0x08, 0x65, 0x6f, 0x73, 0x50, 0x69, 0x65, 0x63, + 0x65, 0x12, 0x22, 0x0a, 0x09, 0x70, 0x61, 0x64, 0x5f, 0x70, 0x69, 0x65, 0x63, 0x65, 0x18, 0x30, + 0x20, 0x01, 0x28, 0x09, 0x3a, 0x05, 0x3c, 0x70, 0x61, 0x64, 0x3e, 0x52, 0x08, 0x70, 0x61, 0x64, + 0x50, 0x69, 0x65, 0x63, 0x65, 0x12, 0x26, 0x0a, 0x0b, 0x75, 0x6e, 0x6b, 0x5f, 0x73, 0x75, 0x72, + 0x66, 0x61, 0x63, 0x65, 0x18, 0x2c, 0x20, 0x01, 0x28, 0x09, 0x3a, 0x05, 0x20, 0xe2, 0x81, 0x87, + 0x20, 0x52, 0x0a, 0x75, 0x6e, 0x6b, 0x53, 0x75, 0x72, 0x66, 0x61, 0x63, 0x65, 0x12, 0x46, 0x0a, + 0x1c, 0x74, 0x72, 0x61, 0x69, 0x6e, 0x5f, 0x65, 0x78, 0x74, 0x72, 0x65, 0x6d, 0x65, 0x6c, 0x79, + 0x5f, 0x6c, 0x61, 0x72, 0x67, 0x65, 0x5f, 0x63, 0x6f, 0x72, 0x70, 0x75, 0x73, 0x18, 0x31, 0x20, + 0x01, 0x28, 0x08, 0x3a, 0x05, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x52, 0x19, 0x74, 0x72, 0x61, 0x69, + 0x6e, 0x45, 0x78, 0x74, 0x72, 0x65, 0x6d, 0x65, 0x6c, 0x79, 0x4c, 0x61, 0x72, 0x67, 0x65, 0x43, + 0x6f, 0x72, 0x70, 0x75, 0x73, 0x12, 0x3a, 0x0a, 0x18, 0x73, 0x65, 0x65, 0x64, 0x5f, 0x73, 0x65, + 0x6e, 0x74, 0x65, 0x6e, 0x63, 0x65, 0x70, 0x69, 0x65, 0x63, 0x65, 0x73, 0x5f, 0x66, 0x69, 0x6c, + 0x65, 0x18, 0x36, 0x20, 0x01, 0x28, 0x09, 0x3a, 0x00, 0x52, 0x16, 0x73, 0x65, 0x65, 0x64, 0x53, + 0x65, 0x6e, 0x74, 0x65, 0x6e, 0x63, 0x65, 0x70, 0x69, 0x65, 0x63, 0x65, 0x73, 0x46, 0x69, 0x6c, + 0x65, 0x22, 0x35, 0x0a, 0x09, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0b, + 0x0a, 0x07, 0x55, 0x4e, 0x49, 0x47, 0x52, 0x41, 0x4d, 0x10, 0x01, 0x12, 0x07, 0x0a, 0x03, 0x42, + 0x50, 0x45, 0x10, 0x02, 0x12, 0x08, 0x0a, 0x04, 0x57, 0x4f, 0x52, 0x44, 0x10, 0x03, 0x12, 0x08, + 0x0a, 0x04, 0x43, 0x48, 0x41, 0x52, 0x10, 0x04, 0x2a, 0x09, 0x08, 0xc8, 0x01, 0x10, 0x80, 0x80, + 0x80, 0x80, 0x02, 0x22, 0xbd, 0x02, 0x0a, 0x0e, 0x4e, 0x6f, 0x72, 0x6d, 0x61, 0x6c, 0x69, 0x7a, + 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x31, 0x0a, 0x14, 0x70, 0x72, + 0x65, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x64, 0x5f, 0x63, 0x68, 0x61, 0x72, 0x73, 0x6d, + 0x61, 0x70, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x13, 0x70, 0x72, 0x65, 0x63, 0x6f, 0x6d, + 0x70, 0x69, 0x6c, 0x65, 0x64, 0x43, 0x68, 0x61, 0x72, 0x73, 0x6d, 0x61, 0x70, 0x12, 0x2e, 0x0a, + 0x10, 0x61, 0x64, 0x64, 0x5f, 0x64, 0x75, 0x6d, 0x6d, 0x79, 0x5f, 0x70, 0x72, 0x65, 0x66, 0x69, + 0x78, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x3a, 0x04, 0x74, 0x72, 0x75, 0x65, 0x52, 0x0e, 0x61, + 0x64, 0x64, 0x44, 0x75, 0x6d, 0x6d, 0x79, 0x50, 0x72, 0x65, 0x66, 0x69, 0x78, 0x12, 0x3e, 0x0a, + 0x18, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x5f, 0x65, 0x78, 0x74, 0x72, 0x61, 0x5f, 0x77, 0x68, + 0x69, 0x74, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x3a, + 0x04, 0x74, 0x72, 0x75, 0x65, 0x52, 0x16, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x45, 0x78, 0x74, + 0x72, 0x61, 0x57, 0x68, 0x69, 0x74, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x12, 0x33, 0x0a, + 0x12, 0x65, 0x73, 0x63, 0x61, 0x70, 0x65, 0x5f, 0x77, 0x68, 0x69, 0x74, 0x65, 
0x73, 0x70, 0x61, + 0x63, 0x65, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x3a, 0x04, 0x74, 0x72, 0x75, 0x65, 0x52, + 0x11, 0x65, 0x73, 0x63, 0x61, 0x70, 0x65, 0x57, 0x68, 0x69, 0x74, 0x65, 0x73, 0x70, 0x61, 0x63, + 0x65, 0x73, 0x12, 0x34, 0x0a, 0x16, 0x6e, 0x6f, 0x72, 0x6d, 0x61, 0x6c, 0x69, 0x7a, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x75, 0x6c, 0x65, 0x5f, 0x74, 0x73, 0x76, 0x18, 0x06, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x14, 0x6e, 0x6f, 0x72, 0x6d, 0x61, 0x6c, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x52, 0x75, 0x6c, 0x65, 0x54, 0x73, 0x76, 0x2a, 0x09, 0x08, 0xc8, 0x01, 0x10, 0x80, 0x80, + 0x80, 0x80, 0x02, 0x22, 0x93, 0x01, 0x0a, 0x0c, 0x53, 0x65, 0x6c, 0x66, 0x54, 0x65, 0x73, 0x74, + 0x44, 0x61, 0x74, 0x61, 0x12, 0x3c, 0x0a, 0x07, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x73, 0x18, + 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x73, 0x65, 0x6e, 0x74, 0x65, 0x6e, 0x63, 0x65, + 0x70, 0x69, 0x65, 0x63, 0x65, 0x2e, 0x53, 0x65, 0x6c, 0x66, 0x54, 0x65, 0x73, 0x74, 0x44, 0x61, + 0x74, 0x61, 0x2e, 0x53, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x52, 0x07, 0x73, 0x61, 0x6d, 0x70, 0x6c, + 0x65, 0x73, 0x1a, 0x3a, 0x0a, 0x06, 0x53, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x12, 0x14, 0x0a, 0x05, + 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x65, 0x78, 0x70, 0x65, 0x63, 0x74, 0x65, 0x64, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x65, 0x78, 0x70, 0x65, 0x63, 0x74, 0x65, 0x64, 0x2a, 0x09, + 0x08, 0xc8, 0x01, 0x10, 0x80, 0x80, 0x80, 0x80, 0x02, 0x22, 0xd7, 0x04, 0x0a, 0x0a, 0x4d, 0x6f, + 0x64, 0x65, 0x6c, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x3f, 0x0a, 0x06, 0x70, 0x69, 0x65, 0x63, + 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x73, 0x65, 0x6e, 0x74, 0x65, + 0x6e, 0x63, 0x65, 0x70, 0x69, 0x65, 0x63, 0x65, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x50, 0x72, + 0x6f, 0x74, 0x6f, 0x2e, 0x53, 0x65, 0x6e, 0x74, 0x65, 0x6e, 0x63, 0x65, 0x50, 0x69, 0x65, 0x63, + 0x65, 0x52, 0x06, 0x70, 0x69, 0x65, 0x63, 0x65, 0x73, 0x12, 0x3d, 0x0a, 0x0c, 0x74, 0x72, 0x61, + 0x69, 0x6e, 0x65, 0x72, 0x5f, 0x73, 0x70, 0x65, 0x63, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x1a, 0x2e, 0x73, 0x65, 0x6e, 0x74, 0x65, 0x6e, 0x63, 0x65, 0x70, 0x69, 0x65, 0x63, 0x65, 0x2e, + 0x54, 0x72, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x52, 0x0b, 0x74, 0x72, 0x61, + 0x69, 0x6e, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x12, 0x46, 0x0a, 0x0f, 0x6e, 0x6f, 0x72, 0x6d, + 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x72, 0x5f, 0x73, 0x70, 0x65, 0x63, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x1d, 0x2e, 0x73, 0x65, 0x6e, 0x74, 0x65, 0x6e, 0x63, 0x65, 0x70, 0x69, 0x65, 0x63, + 0x65, 0x2e, 0x4e, 0x6f, 0x72, 0x6d, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, + 0x52, 0x0e, 0x6e, 0x6f, 0x72, 0x6d, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, + 0x12, 0x41, 0x0a, 0x0e, 0x73, 0x65, 0x6c, 0x66, 0x5f, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x64, 0x61, + 0x74, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x73, 0x65, 0x6e, 0x74, 0x65, + 0x6e, 0x63, 0x65, 0x70, 0x69, 0x65, 0x63, 0x65, 0x2e, 0x53, 0x65, 0x6c, 0x66, 0x54, 0x65, 0x73, + 0x74, 0x44, 0x61, 0x74, 0x61, 0x52, 0x0c, 0x73, 0x65, 0x6c, 0x66, 0x54, 0x65, 0x73, 0x74, 0x44, + 0x61, 0x74, 0x61, 0x12, 0x4a, 0x0a, 0x11, 0x64, 0x65, 0x6e, 0x6f, 0x72, 0x6d, 0x61, 0x6c, 0x69, + 0x7a, 0x65, 0x72, 0x5f, 0x73, 0x70, 0x65, 0x63, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, + 0x2e, 0x73, 0x65, 0x6e, 0x74, 0x65, 0x6e, 0x63, 0x65, 0x70, 0x69, 0x65, 0x63, 0x65, 0x2e, 0x4e, + 0x6f, 
0x72, 0x6d, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x52, 0x10, 0x64, + 0x65, 0x6e, 0x6f, 0x72, 0x6d, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x1a, + 0xe6, 0x01, 0x0a, 0x0d, 0x53, 0x65, 0x6e, 0x74, 0x65, 0x6e, 0x63, 0x65, 0x50, 0x69, 0x65, 0x63, + 0x65, 0x12, 0x14, 0x0a, 0x05, 0x70, 0x69, 0x65, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x05, 0x70, 0x69, 0x65, 0x63, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x63, 0x6f, 0x72, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x02, 0x52, 0x05, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x12, 0x48, 0x0a, + 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x2c, 0x2e, 0x73, 0x65, + 0x6e, 0x74, 0x65, 0x6e, 0x63, 0x65, 0x70, 0x69, 0x65, 0x63, 0x65, 0x2e, 0x4d, 0x6f, 0x64, 0x65, + 0x6c, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x53, 0x65, 0x6e, 0x74, 0x65, 0x6e, 0x63, 0x65, 0x50, + 0x69, 0x65, 0x63, 0x65, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x3a, 0x06, 0x4e, 0x4f, 0x52, 0x4d, 0x41, + 0x4c, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0x54, 0x0a, 0x04, 0x54, 0x79, 0x70, 0x65, 0x12, + 0x0a, 0x0a, 0x06, 0x4e, 0x4f, 0x52, 0x4d, 0x41, 0x4c, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x55, + 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x02, 0x12, 0x0b, 0x0a, 0x07, 0x43, 0x4f, 0x4e, 0x54, + 0x52, 0x4f, 0x4c, 0x10, 0x03, 0x12, 0x10, 0x0a, 0x0c, 0x55, 0x53, 0x45, 0x52, 0x5f, 0x44, 0x45, + 0x46, 0x49, 0x4e, 0x45, 0x44, 0x10, 0x04, 0x12, 0x08, 0x0a, 0x04, 0x42, 0x59, 0x54, 0x45, 0x10, + 0x06, 0x12, 0x0a, 0x0a, 0x06, 0x55, 0x4e, 0x55, 0x53, 0x45, 0x44, 0x10, 0x05, 0x2a, 0x09, 0x08, + 0xc8, 0x01, 0x10, 0x80, 0x80, 0x80, 0x80, 0x02, 0x2a, 0x09, 0x08, 0xc8, 0x01, 0x10, 0x80, 0x80, + 0x80, 0x80, 0x02, 0x42, 0x13, 0x48, 0x03, 0x5a, 0x0f, 0x2e, 0x2f, 0x73, 0x65, 0x6e, 0x74, 0x65, + 0x6e, 0x63, 0x65, 0x70, 0x69, 0x65, 0x63, 0x65, +} + +var ( + file_sentencepiece_model_proto_rawDescOnce sync.Once + file_sentencepiece_model_proto_rawDescData = file_sentencepiece_model_proto_rawDesc +) + +func file_sentencepiece_model_proto_rawDescGZIP() []byte { + file_sentencepiece_model_proto_rawDescOnce.Do(func() { + file_sentencepiece_model_proto_rawDescData = protoimpl.X.CompressGZIP(file_sentencepiece_model_proto_rawDescData) + }) + return file_sentencepiece_model_proto_rawDescData +} + +var file_sentencepiece_model_proto_enumTypes = make([]protoimpl.EnumInfo, 2) +var file_sentencepiece_model_proto_msgTypes = make([]protoimpl.MessageInfo, 6) +var file_sentencepiece_model_proto_goTypes = []any{ + (TrainerSpec_ModelType)(0), // 0: sentencepiece.TrainerSpec.ModelType + (ModelProto_SentencePiece_Type)(0), // 1: sentencepiece.ModelProto.SentencePiece.Type + (*TrainerSpec)(nil), // 2: sentencepiece.TrainerSpec + (*NormalizerSpec)(nil), // 3: sentencepiece.NormalizerSpec + (*SelfTestData)(nil), // 4: sentencepiece.SelfTestData + (*ModelProto)(nil), // 5: sentencepiece.ModelProto + (*SelfTestData_Sample)(nil), // 6: sentencepiece.SelfTestData.Sample + (*ModelProto_SentencePiece)(nil), // 7: sentencepiece.ModelProto.SentencePiece +} +var file_sentencepiece_model_proto_depIdxs = []int32{ + 0, // 0: sentencepiece.TrainerSpec.model_type:type_name -> sentencepiece.TrainerSpec.ModelType + 6, // 1: sentencepiece.SelfTestData.samples:type_name -> sentencepiece.SelfTestData.Sample + 7, // 2: sentencepiece.ModelProto.pieces:type_name -> sentencepiece.ModelProto.SentencePiece + 2, // 3: sentencepiece.ModelProto.trainer_spec:type_name -> sentencepiece.TrainerSpec + 3, // 4: sentencepiece.ModelProto.normalizer_spec:type_name -> sentencepiece.NormalizerSpec + 4, // 5: 
sentencepiece.ModelProto.self_test_data:type_name -> sentencepiece.SelfTestData + 3, // 6: sentencepiece.ModelProto.denormalizer_spec:type_name -> sentencepiece.NormalizerSpec + 1, // 7: sentencepiece.ModelProto.SentencePiece.type:type_name -> sentencepiece.ModelProto.SentencePiece.Type + 8, // [8:8] is the sub-list for method output_type + 8, // [8:8] is the sub-list for method input_type + 8, // [8:8] is the sub-list for extension type_name + 8, // [8:8] is the sub-list for extension extendee + 0, // [0:8] is the sub-list for field type_name +} + +func init() { file_sentencepiece_model_proto_init() } +func file_sentencepiece_model_proto_init() { + if File_sentencepiece_model_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_sentencepiece_model_proto_msgTypes[0].Exporter = func(v any, i int) any { + switch v := v.(*TrainerSpec); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + case 3: + return &v.extensionFields + default: + return nil + } + } + file_sentencepiece_model_proto_msgTypes[1].Exporter = func(v any, i int) any { + switch v := v.(*NormalizerSpec); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + case 3: + return &v.extensionFields + default: + return nil + } + } + file_sentencepiece_model_proto_msgTypes[2].Exporter = func(v any, i int) any { + switch v := v.(*SelfTestData); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + case 3: + return &v.extensionFields + default: + return nil + } + } + file_sentencepiece_model_proto_msgTypes[3].Exporter = func(v any, i int) any { + switch v := v.(*ModelProto); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + case 3: + return &v.extensionFields + default: + return nil + } + } + file_sentencepiece_model_proto_msgTypes[4].Exporter = func(v any, i int) any { + switch v := v.(*SelfTestData_Sample); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_sentencepiece_model_proto_msgTypes[5].Exporter = func(v any, i int) any { + switch v := v.(*ModelProto_SentencePiece); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + case 3: + return &v.extensionFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_sentencepiece_model_proto_rawDesc, + NumEnums: 2, + NumMessages: 6, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_sentencepiece_model_proto_goTypes, + DependencyIndexes: file_sentencepiece_model_proto_depIdxs, + EnumInfos: file_sentencepiece_model_proto_enumTypes, + MessageInfos: file_sentencepiece_model_proto_msgTypes, + }.Build() + File_sentencepiece_model_proto = out.File + file_sentencepiece_model_proto_rawDesc = nil + file_sentencepiece_model_proto_goTypes = nil + file_sentencepiece_model_proto_depIdxs = nil +} diff --git a/convert/sentencepiece_model.proto b/convert/sentencepiece_model.proto new file mode 100644 index 0000000..370887a --- /dev/null +++ b/convert/sentencepiece_model.proto @@ -0,0 +1,333 @@ +// Copyright 2016 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.! + +syntax = "proto2"; + +// TODO(taku): Needs to use LITE RUNTIME in OSS release. +option optimize_for = LITE_RUNTIME; +option go_package = "./sentencepiece"; + +package sentencepiece; + +// TrainerSpec encodes a various parameters for SentencePiece training. +// Next id: 55 +message TrainerSpec { + /////////////////////////////////////////////////////////////////// + // General parameters + // + // Input corpus files. + // Trainer accepts the following two formats: + // A) Monolingual: plain text, one sentence per line. + // B) Bilingual: TSV, source sentence target sentence + // When bilingual data is passed, shared vocabulary model is built. + // Note that the input file must be raw corpus, not a preprocessed corpus. + // Trainer only loads the first `input_sentence_size` sentences specified + // with this parameter. + repeated string input = 1; + + // Input corpus format: + // "text": one-sentence-per-line text format (default) + // "tsv": sentence freq + optional string input_format = 7; + + // Output model file prefix. + // .model and .vocab are generated. + optional string model_prefix = 2; + + // Model type. only have UNIGRAM now. + enum ModelType { + UNIGRAM = 1; // Unigram language model with dynamic algorithm + BPE = 2; // Byte Pair Encoding + WORD = 3; // Delimitered by whitespace. + CHAR = 4; // tokenizes into character sequence + } + optional ModelType model_type = 3 [default = UNIGRAM]; + + // Vocabulary size. 8k is the default size. + optional int32 vocab_size = 4 [default = 8000]; + + // List of the languages this model can accept. + // Since the model is language-agnostic, this field is used as a reference. + repeated string accept_language = 5; + + // Size of self-test samples, which are encoded in the model file. + optional int32 self_test_sample_size = 6 [default = 0]; + + // Whether to use DP version of sentencepiece. Use it with TSV input format + // (requires precomputed word tab counts to work). + optional bool enable_differential_privacy = 50 [default = false]; + // Set these parameters if you need DP version of sentencepiece. + // std of noise to add. + optional float differential_privacy_noise_level = 51 [default = 0.0]; + // Clipping threshold to apply after adding noise. All the words with + // frequency less than this value are dropped. + optional uint64 differential_privacy_clipping_threshold = 52 [default = 0]; + + /////////////////////////////////////////////////////////////////// + // Training parameters. + // + // Uses characters which cover the corpus with the ratio of `chars_coverage`. + // This parameter determines the set of basic Alphabet of sentence piece. + // 1.0 - `chars_coverage` characters are treated as UNK. + // See also required_chars field. + optional float character_coverage = 10 [default = 0.9995]; + + // Maximum size of sentences the trainer loads from `input` parameter. + // Trainer simply loads the `input` files in sequence. + // It is better to shuffle the input corpus randomly. 
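+ // For example (an illustrative setting, not a recommendation made by this
+ // file): input_sentence_size = 10000000 together with
+ // shuffle_input_sentence = true samples at most 10M sentences at random
+ // from the corpus rather than taking the first 10M lines in file order.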
+ optional uint64 input_sentence_size = 11 [default = 0]; + optional bool shuffle_input_sentence = 19 [default = true]; + + // Maximum size of sentences to make seed sentence pieces. + // Extended suffix array is constructed to extract frequent + // sub-strings from the corpus. This uses 20N working space, + // where N is the size of corpus. + optional int32 mining_sentence_size = 12 [deprecated = true]; + + // Maximum size of sentences to train sentence pieces. + optional int32 training_sentence_size = 13 [deprecated = true]; + + // The size of seed sentencepieces. + // `seed_sentencepiece_size` must be larger than `vocab_size`. + optional int32 seed_sentencepiece_size = 14 [default = 1000000]; + + // In every EM sub-iterations, keeps top + // `shrinking_factor` * `current sentencepieces size` with respect to + // the loss of the sentence piece. This value should be smaller than 1.0. + optional float shrinking_factor = 15 [default = 0.75]; + + // The maximum sentence length in byte. The sentences with the length + // larger than `max_sentence_length` is simply ignored. + // Longer input tends to bring the following risks: + // * Overflow during EM training (unigram language model only) + // * Performance drop because of O(n log n) cost in BPE. + optional int32 max_sentence_length = 18 [default = 4192]; + + // Number of threads in the training. + optional int32 num_threads = 16 [default = 16]; + + // Number of EM sub iterations. + optional int32 num_sub_iterations = 17 [default = 2]; + + /////////////////////////////////////////////////////////////////// + // SentencePiece parameters which control the shapes of sentence piece. + // + // Maximum length of sentencepiece. + optional int32 max_sentencepiece_length = 20 [default = 16]; + + // Uses Unicode script to split sentence pieces. + // When `split_by_unicode_script` is true, we do not allow sentence piece to + // include multiple Unicode scripts, e.g. "F1" is not a valid piece. + // Exception: CJ characters (Hiragana/Katakana/Han) are all handled + // as one script type, since Japanese word can consist of multiple scripts. + // This exception is always applied regardless of the accept-language + // parameter. + optional bool split_by_unicode_script = 21 [default = true]; + + // When `split_by_number` is true, put a boundary between number and + // non-number transition. If we want to treat "F1" is one token, set this flag + // to be false. + optional bool split_by_number = 23 [default = true]; + + // Use a white space to split sentence pieces. + // When `split_by_whitespace` is false, we may have the piece containing + // a white space in the middle. e.g., "in_the". + optional bool split_by_whitespace = 22 [default = true]; + + // Adds whitespace symbol (_) as a suffix instead of prefix. e.g., _hello => + // hello_. When `treat_whitespace_as_suffix` is true, + // NormalizerSpec::add_dummy_prefix will add the dummy whitespace to the end + // of sentence. + optional bool treat_whitespace_as_suffix = 24 [default = false]; + + // Allows pieces that only contain whitespaces instead of appearing only as + // prefix or suffix of other pieces. + optional bool allow_whitespace_only_pieces = 26 [default = false]; + + // Split all digits (0-9) into separate pieces. + optional bool split_digits = 25 [default = false]; + + // Defines the pre-tokenization delimiter. + // When specified, no pieces crossing this delimiter is not included + // in the vocab. Then the delimiter string is virtually ignored + // during the training. 
This field allows constraints on the vocabulary
+ // selection. Note that this field is only available in unigram mode.
+ optional string pretokenization_delimiter = 53 [default = ""];
+
+ ///////////////////////////////////////////////////////////////////
+ // Vocabulary management
+ //
+ // Defines control symbols used as an indicator to
+ // change the behavior of the decoder. <s> and </s> are pre-defined.
+ // We can use this field to encode various meta information,
+ // including language indicators in multilingual models.
+ // These symbols are not visible to users, but visible to
+ // the decoder. Note that when the input sentence contains control symbols,
+ // they are not treated as one token, but segmented into normal pieces.
+ // Control symbols must be inserted independently from the segmentation.
+ repeated string control_symbols = 30;
+
+ // Defines user defined symbols.
+ // These symbols are added with an extremely high score
+ // so they are always treated as one unique symbol in any context.
+ // Typical usage of user_defined_symbols is as a placeholder for named entities.
+ repeated string user_defined_symbols = 31;
+
+ // Defines required characters. Each UTF8 character in this string is included
+ // in the character set regardless of the character_coverage value. Unlike
+ // user_defined_symbols, these characters have scores based on their frequency
+ // in the input sentences, and the model can form subwords using characters
+ // in this field.
+ optional string required_chars = 36;
+
+ // Decomposes unknown pieces into UTF-8 bytes.
+ optional bool byte_fallback = 35 [default = false];
+
+ // When creating the vocabulary file, defines whether or not to additionally
+ // output the score for each piece.
+ optional bool vocabulary_output_piece_score = 32 [default = true];
+
+ // `vocab_size` is treated as a hard limit: training crashes if the model
+ // cannot produce a vocab of size `vocab_size`. When `hard_vocab_limit` is
+ // false, vocab_size is treated as a soft limit. Note that when
+ // model_type=char, hard_vocab_limit is always assumed to be false.
+ optional bool hard_vocab_limit = 33 [default = true];
+
+ // Use all symbols for vocab extraction. This flag is valid
+ // only if the model type is either CHAR or WORD.
+ optional bool use_all_vocab = 34 [default = false];
+
+ ///////////////////////////////////////////////////////////////////
+ // Reserved special meta tokens.
+ // * -1 is not used.
+ // * unk_id must not be -1.
+ // Ids must start with 0 and be contiguous.
+ optional int32 unk_id = 40 [default = 0]; // <unk>
+ optional int32 bos_id = 41 [default = 1]; // <s>
+ optional int32 eos_id = 42 [default = 2]; // </s>
+ optional int32 pad_id = 43 [default = -1]; // <pad> (padding)
+ optional string unk_piece = 45 [default = "<unk>"];
+ optional string bos_piece = 46 [default = "<s>"];
+ optional string eos_piece = 47 [default = "</s>"];
+ optional string pad_piece = 48 [default = "<pad>"];
+
+ // Encodes <unk> into U+2047 (DOUBLE QUESTION MARK),
+ // since this character can be useful both for the user and the
+ // developer: we can easily see when <unk> is emitted.
+ optional string unk_surface = 44 [default = " \xE2\x81\x87 "];
+
+ // Increases bit depth to allow unigram model training on large
+ // (>10M sentences) corpora. A side effect of enabling this flag
+ // is increased memory usage.
+ optional bool train_extremely_large_corpus = 49 [default = false];
+
+ // Path to a seed sentencepieces file, with one tab-separated
+ // seed sentencepiece <tab> frequency per line.
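+ // For example (illustrative entries only, not taken from any shipped model):
+ //   ▁the<tab>1203419
+ //   ▁of<tab>702493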
+
+// NormalizerSpec encodes various parameters for string normalization.
+message NormalizerSpec {
+ // Name of the normalization rule.
+ optional string name = 1;
+
+ // Pre-compiled normalization rule created by the
+ // Builder::GetPrecompiledCharsMap() or Builder::CompileCharsMap() method.
+ // Usually this field is set by the Builder::GetNormalizerSpec() method.
+ optional bytes precompiled_charsmap = 2;
+
+ // Adds dummy whitespace at the beginning of the text in order to
+ // treat "world" in "world" and "hello world" in the same way.
+ optional bool add_dummy_prefix = 3 [default = true];
+
+ // Removes leading, trailing, and duplicate internal whitespace.
+ optional bool remove_extra_whitespaces = 4 [default = true];
+
+ // Replaces whitespace with the meta symbol.
+ // This field must be true to train a sentence piece model.
+ optional bool escape_whitespaces = 5 [default = true];
+
+ // Custom normalization rule file in TSV format.
+ // https://github.com/google/sentencepiece/blob/master/doc/normalization.md
+ // This field is only used in the SentencePieceTrainer::Train() method, which
+ // compiles the rule into the binary rule stored in `precompiled_charsmap`.
+ optional string normalization_rule_tsv = 6;
+
+ // Customized extensions: the range of field numbers
+ // is open to third-party extensions.
+ extensions 200 to max;
+}
+
+// Proto to store samples for self-testing.
+message SelfTestData {
+ message Sample {
+   optional string input = 1;
+   optional string expected = 2;
+ }
+ repeated Sample samples = 1;
+
+ // Customized extensions: the range of field numbers
+ // is open to third-party extensions.
+ extensions 200 to max;
+}
+
+// ModelProto stores model parameters.
+// SentencePieceProcessor is supposed to be self-contained.
+// All settings/parameters which may change the behavior must be encoded
+// in ModelProto.
+message ModelProto {
+ message SentencePiece {
+   enum Type {
+     NORMAL = 1;        // normal symbol
+     UNKNOWN = 2;       // unknown symbol. only <unk> for now.
+     CONTROL = 3;       // control symbols. </s>, <s>, <2ja> etc.
+     USER_DEFINED = 4;  // user defined symbols.
+                        // Typical usage of USER_DEFINED symbol
+                        // is a placeholder.
+     BYTE = 6;          // byte symbols. Used when `byte_fallback` is true.
+     UNUSED = 5;        // this piece is not used.
+   }
+   optional string piece = 1;  // piece must not be empty.
+   optional float score = 2;
+   optional Type type = 3 [default = NORMAL];
+
+   // Customized extensions: the range of field numbers
+   // is open to third-party extensions.
+   extensions 200 to max;
+ }
+
+ // Sentence pieces with scores.
+ repeated SentencePiece pieces = 1;
+
+ // Spec used to generate this model file.
+ optional TrainerSpec trainer_spec = 2;
+
+ // Spec for text normalization.
+ optional NormalizerSpec normalizer_spec = 3;
+
+ // Stores sample input and its expected segmentation to verify the model.
+ optional SelfTestData self_test_data = 4;
+
+ // Spec for text de-normalization.
+ optional NormalizerSpec denormalizer_spec = 5;
+
+ // Customized extensions: the range of field numbers
+ // is open to third-party extensions.
+ extensions 200 to max; +} diff --git a/convert/tensor.go b/convert/tensor.go new file mode 100644 index 0000000..ffb22ea --- /dev/null +++ b/convert/tensor.go @@ -0,0 +1,56 @@ +package convert + +import ( + "iter" + "slices" + "strings" + + "github.com/ollama/ollama/fs/ggml" + "github.com/pdevine/tensor" + "github.com/pdevine/tensor/native" +) + +// splitDim splits a tensor along a specified dimension into multiple tensors. The dimension +// is split evenly based on the number of replacers provided. +func splitDim(t Tensor, dim int, replacers ...*strings.Replacer) iter.Seq[*ggml.Tensor] { + return func(yield func(*ggml.Tensor) bool) { + for i, replacer := range replacers { + shape := slices.Clone(t.Shape()) + shape[dim] = shape[dim] / uint64(len(replacers)) + + slice := slices.Repeat([]tensor.Slice{nil}, len(shape)) + slice[dim] = tensor.S(i*int(shape[dim]), (i+1)*int(shape[dim])) + + tt := t.Clone() + tt.SetRepacker(func(_ string, data []float32, shape []uint64) ([]float32, error) { + dims := make([]int, len(shape)) + for i := range shape { + dims[i] = int(shape[i]) + } + + var t tensor.Tensor = tensor.New(tensor.WithShape(dims...), tensor.WithBacking(data)) + t, err := t.Slice(slice...) + if err != nil { + return nil, err + } + + t = tensor.Materialize(t) + // flatten tensor so it can be written as a vector + if err := t.Reshape(t.Shape().TotalSize()); err != nil { + return nil, err + } + + return native.VectorF32(t.(*tensor.Dense)) + }) + + if !yield(&ggml.Tensor{ + Name: replacer.Replace(t.Name()), + Kind: t.Kind(), + Shape: shape, + WriterTo: tt, + }) { + break + } + } + } +} diff --git a/convert/testdata/Meta-Llama-3-8B-Instruct.json b/convert/testdata/Meta-Llama-3-8B-Instruct.json new file mode 100644 index 0000000..808826b --- /dev/null +++ b/convert/testdata/Meta-Llama-3-8B-Instruct.json @@ -0,0 +1,313 @@ +{ + "general.architecture": "llama", + "general.file_type": "1", + "general.quantization_version": "2", + "llama.block_count": "32", + "llama.context_length": "8192", + "llama.embedding_length": "4096", + "llama.feed_forward_length": "14336", + "llama.rope.dimension_count": "128", + "llama.rope.freq_base": "500000", + "llama.vocab_size": "128256", + "llama.attention.head_count": "32", + "llama.attention.head_count_kv": "8", + "llama.attention.layer_norm_rms_epsilon": "1e-05", + "tokenizer.ggml.model": "gpt2", + "tokenizer.ggml.pre": "llama-bpe", + "tokenizer.ggml.bos_token_id": "128000", + "tokenizer.ggml.eos_token_id": "128009", + "tokenizer.ggml.merges": "d0cbac1fcc9dcf03724b8db5c9bfb593ae1cf68fb9bc72eb1d15274dcbbf618b", + "tokenizer.ggml.token_type": "d70a88809fd7da6f1f028622685cd64268a7a922c5d343c96f25b66327358978", + "tokenizer.ggml.tokens": "765b529dbcbc42dd202ce657341c63807b51f3b07e09898f6aa6196326865d5a", + "token_embd.weight": "b53102a11d9064bbd404833e3464b1b13e08ce73300b442312cccde2f19b2698", + "blk.0.attn_norm.weight": "7318df3cca9e8d153ff0a503026a1265e63d20b2a8c1dd7a2769585082b5d1ee", + "blk.0.ffn_down.weight": "b950806a1fc722c9fad7fd0b20c3c0a7fb50f14395e1e7663a590bfd62e20900", + "blk.0.ffn_gate.weight": "e73e580af6d4f08e060a74a3c25efdf5d3bed99e183d95a5a85ae859014839fd", + "blk.0.ffn_up.weight": "c8158af679ef99746da1befb67eebb19489e0bbe6ce7d97e13e348508244e516", + "blk.0.ffn_norm.weight": "7ec69c3c31e95e49a3359003b0033f6b9e85561a3e3fd83e7476661ecdd756bb", + "blk.0.attn_k.weight": "2732303257bac969b4964e0e32ec08b5a7f5c031bb02bf6ac4467b3ea0ebcf1e", + "blk.0.attn_output.weight": "ecda1d43b4ccc91cd5b366d7e7a275353990ac78561a07c83d9c77031aba12dc", + 
"blk.0.attn_q.weight": "569b1f5faf92b6f00910cf7effb2d5862f91038ce5c3b0019fc10e5d79fbd5e1", + "blk.0.attn_v.weight": "aa8416c5ef7e32fb54a1f20d6ac651656845d4af240564b397c39bd83e06e3b8", + "blk.1.attn_norm.weight": "03327e02862908c2a44b2f52decdb924bf4201f400b46f8037a9cb2e1d7a61ff", + "blk.1.ffn_down.weight": "5a83a87603f38c99f8e1e370a2d5f967bb45ac51d881a609304a7811027321e0", + "blk.1.ffn_gate.weight": "31da0572c79e655186c721c231376f85e56cdcc6257c28d08c8c5b40d5c22b40", + "blk.1.ffn_up.weight": "e0c811d64ca155c8de10a868e72015d43888834804614ee1aa2953129ffbc90f", + "blk.1.ffn_norm.weight": "5861f313d6137d6f0f904d423df47fffc6069e224ff746e1b637ac9c7f0af862", + "blk.1.attn_k.weight": "5fbbec0acca6457b9416ebdcd90e526885d0224537b7628f6be376a7f275313d", + "blk.1.attn_output.weight": "b237c9763fa3f75166a6f70b70f1566e77d0d89dfa164ed1b3137393e90575c3", + "blk.1.attn_q.weight": "c0a9cf4a98b4882b16f3eb2b49d933793dcc5357abb246fd3fe3134ed2b12e1c", + "blk.1.attn_v.weight": "96867111727200cac1af7865189dd41fd62b47584e5e5f33a91f1d34509cbd40", + "blk.2.attn_norm.weight": "f392f8a88ee3a95b1cc19c40dd4ef66317037b0faaa1800f610779e129ee0539", + "blk.2.ffn_down.weight": "73823eef46632aedcc8c1cb08a736b6aa97ca97842cd1fdfc5567d8dec459662", + "blk.2.ffn_gate.weight": "f4909ae19fc3848b00bb8b9050122e74f8e903b89e22937036f4cc9fea20a718", + "blk.2.ffn_up.weight": "16f4904a3d814ea68f00519724fc4943e48444a84c786bda39aa5efc298a7d84", + "blk.2.ffn_norm.weight": "e3ccdf56e75cb969f6f69c39caf6daf7c4e70e89e25df0f4d2e4bc60e159aafe", + "blk.2.attn_k.weight": "c3beb1e0a11bcf007ef0f0d8f6bdd3082d8b29090cd29597846b5d51e308a8e5", + "blk.2.attn_output.weight": "bb9f66c32cff51154fea92933c2cd62549236f8cb1a767f9ef28d3f99809b343", + "blk.2.attn_q.weight": "8eba394132eef2a05c5a92d62d2376000f7948448d7a2dc74e6b608203add20d", + "blk.2.attn_v.weight": "88f61f77c53567c617db3eef8f30621109a750e679f6784f7911739bd42c2f02", + "blk.3.attn_norm.weight": "7b996675b7ca75fa24107b3ebe0788653ede0f49ac83b8659d71ff54d591f81a", + "blk.3.ffn_down.weight": "2cb332bc05e4821962fdc9dcbcc7cc12630f32117711b687d18fb53c0bc4fbf4", + "blk.3.ffn_gate.weight": "340b387c7f208c8f0a6db904ef8d87c1e84b7d6ad57177abd32d86c8d18b760f", + "blk.3.ffn_up.weight": "07484433f8a7ee061c55aa0de2ecc009f769b0617c9c0ec096e9bb2946df9f0e", + "blk.3.ffn_norm.weight": "4f1a4ade36b393af341240bc894a2aab09cff7e4d56dc4658445deb107f9371b", + "blk.3.attn_k.weight": "483dcd96acb4528df84b9842970994630dbd82b8715ace394aa8b39fcf8d6291", + "blk.3.attn_output.weight": "beaff0810687923585642ee11d929cbf3b43dc6f87f30ddb552c222ab57bdbb3", + "blk.3.attn_q.weight": "0739355002f6fce520863add697e0ff25fc88215322dc3f993be7bb68dcce7e8", + "blk.3.attn_v.weight": "c216d17b6d90ee3e07f82598b8161fae34de2f392dbb0f745b682b578c324767", + "blk.4.attn_norm.weight": "91ab405bc4ba15bf63af233f266aa43aaab43789a9e6596e14a357c2ac7df217", + "blk.4.ffn_down.weight": "620f34ee75cdc73aecb8949af5fbb0d2437fd81422b6d8eb7acfc52addb9fc68", + "blk.4.ffn_gate.weight": "f6feec7bc9acadf35ec22532f8998d8e50f31afedabb19263590dcf8b9a92eee", + "blk.4.ffn_up.weight": "4a72af7cd28fd07b038f6cc4406678d120517280236ea85d9e76eff40ab2cc22", + "blk.4.ffn_norm.weight": "1805b37b44d5d682bdbd2fadeafb763ee001617d7870848cc487079ee34b21f9", + "blk.4.attn_k.weight": "a1e4f9d97cdf4c1b0d177cf00c4e32d1be30c1984a239b3c9bd73f8848888853", + "blk.4.attn_output.weight": "a1547e2497c423b0aff0eee71d9300d6fdf4e4986679418b6e637b69a9a6720b", + "blk.4.attn_q.weight": "0677483a9264ea6803d03d304d87a54632242cb516e8b76b6e3e8284c2f4de04", + "blk.4.attn_v.weight": 
"02691ba3af344fcc1969428ab0df811ac94aaa2fd91b0dc4ec1ac0a58806980d", + "blk.5.attn_norm.weight": "ba9c028335e5c895b87a5bd1448ca429248f9746ed97bdcb8679923206117156", + "blk.5.ffn_down.weight": "ccfdc9006acad1940a6bc05042a3947f1066acd671e0bb53b7684e9eea9ef5c9", + "blk.5.ffn_gate.weight": "623157679f1e742ccc3807c0b0153ddc8450104de75ec62f1370ec3807c09cf4", + "blk.5.ffn_up.weight": "05748804c65091f963729b58b085f58351891cac8a2861f5eae26b06aa60b2a0", + "blk.5.ffn_norm.weight": "84bae55af2efc8b8429f09056c8c04990c466dae31cb3f9356038b8957f1b406", + "blk.5.attn_k.weight": "8c766180c726b037d587fc52371de6e3307140c52409011609d1225624b6a3eb", + "blk.5.attn_output.weight": "490b582b3b1dc151ae55aee8b6743dad6c01fb49e43afefb6e68394b74be3d73", + "blk.5.attn_q.weight": "6f7b8ca4d9025ec836a44bbcca46be30c66b471a9fb62943ddff8288b3731409", + "blk.5.attn_v.weight": "9f70df3ba00c9e723214b3da83ff435a2163fff5915f75515c9664c05c866c27", + "blk.6.attn_norm.weight": "1a4a66613a682df6f061fc7c4d986f9f7e9175b62f0c42fc1ef31db536bd5942", + "blk.6.ffn_down.weight": "c56f25e4e49b443dbc82d88311ee63bc1f5002cc67e52f4787fd5f003aedeac1", + "blk.6.ffn_gate.weight": "31a5cf1aa9b831a81588d508550f51fc425f9517c43254d4ef7096d38029cf04", + "blk.6.ffn_up.weight": "ce135f3a1163e0c9297a615bdbe68a67ead21edce8debbfa9f6e15e6af8d4c94", + "blk.6.ffn_norm.weight": "4e328ce0648c94e732bc40501858ef6262ad1161e2e407b0cdcf4813fa9d45d8", + "blk.6.attn_k.weight": "1eb1c4c9f9c4c7ff7f5429075e0dc6a7782bed55109fa88df209a817dd8ef960", + "blk.6.attn_output.weight": "3d32986b56873b88655ee1edabdd413fdd9ab18b82108c9ce90bdbc2d3a6f3a3", + "blk.6.attn_q.weight": "8432f583b3a2809c99c393f9beb077cb0534dd5d247c17108f2986cadc6651f6", + "blk.6.attn_v.weight": "5045381513815bb91839dbac8335ffe49bbc7b0008369de7ea97eb676c5e2b36", + "blk.7.attn_norm.weight": "3dabd003638ec2499bfc8a48c49eef34276caab4fe76894eb963207848c2fdaf", + "blk.7.ffn_down.weight": "194fae858608bdcffd235be59ab119d0b91c8549f864ea06dae69249e099935f", + "blk.7.ffn_gate.weight": "00b24c29c30246892bce0791be804a89701d4c1332777e0bcdad5d9d5666604f", + "blk.7.ffn_up.weight": "44d7082a5280080c90cef9e19d410391de34f212ca0736377769b8ddd0c82d5e", + "blk.7.ffn_norm.weight": "21fe8a7fd6911c64e0d15a788b3b4cb6d71dd6ec51de65f760ee89afbb6ae53e", + "blk.7.attn_k.weight": "57a149eec5f6744a9526cd3925ac073f9d12db0fbcb5afe042ef4dc846458c44", + "blk.7.attn_output.weight": "0e9c28a3e81a2880251ce5eed77bcb8be8aaa1a51c9cb6de820b47ed83849fc2", + "blk.7.attn_q.weight": "15ee75263ee4e2a43eb322bc159ae004bb7d77e3a7e63ee4ddab700430693fff", + "blk.7.attn_v.weight": "440aa970bba4bff429fd7b7b1de21f2ad14fb2952b776cfa4acee68d7c6e9b8f", + "blk.8.attn_norm.weight": "af5b44825633c42c1ae964c82bb2be6a242d3a751f0a91f1bae4f593e8f5b6ec", + "blk.8.ffn_down.weight": "b11c14c76adca94fa200496dd2c10743becb23aab6642443ef1ae6d8710edbc1", + "blk.8.ffn_gate.weight": "7bb03d3325bf8637ae2fa1296b0651356515578d46a7c5ca65c7a923d7de27bc", + "blk.8.ffn_up.weight": "b956ef0a0669b5a9c9bf3a8da2d1c24f52d331cfb7354f6d7c51bd65be355e30", + "blk.8.ffn_norm.weight": "c78c3d748302edfef76f71ea5cb2055c94352122eee8b9b1173779a1814d224e", + "blk.8.attn_k.weight": "c0fba6a596ed9c1c32a7055c31a935a8b31e42b77282ee47c1f03ee3bde736b5", + "blk.8.attn_output.weight": "83cf9947080c5d8d571f04a842bc3dcfe7bbb0195fb25b346e22635e8649f2d4", + "blk.8.attn_q.weight": "47409350a576b333d97b7c877d69f47f46df504f3765102dfc0be9e521c7ecd6", + "blk.8.attn_v.weight": "1999dff91404fdcf1ecb34d9eaaaa9244ec7658a74dec8feb7cfd1fddba0347e", + "blk.9.attn_norm.weight": 
"1e6e29d5c3889ab4e1b0a5b9998cba60179b0f1fca133515df49cbc19d092593", + "blk.9.ffn_down.weight": "acb898a6490adff592e10b4c62d70edc5941661ee6da44658500e9205357c8e9", + "blk.9.ffn_gate.weight": "4cff63013593aadc3ffbaaa6ed70ffdba1224cd43c3644bf6f4162b5ac1ab542", + "blk.9.ffn_up.weight": "f985b5a2d6cf4fe32c7256301c3c89b8ad22b59e516342c52da42d8110766a4e", + "blk.9.ffn_norm.weight": "0d659c538bc6b21ed0018f107ab674a7424a00a42946c80e07208b479b21918f", + "blk.9.attn_k.weight": "f67611d888780d1b38c1c146b361c65310c8183bdf64fd73e2259985c6e8517f", + "blk.9.attn_output.weight": "f12ca1fa62a02ddc3f77f798bfb5707e0c50bf18ee0eaa67025521a98355f26b", + "blk.9.attn_q.weight": "3865185f4361a645b086ad47b72904c095313fb1c624e511647bf1a7dfc1c476", + "blk.9.attn_v.weight": "92125bbfed63544ab56052bd1e4aa453bbf34c795249ee54cde54907c8c6d1d3", + "blk.10.attn_norm.weight": "5d6bfbe545bcc2fcb2fc75c68f64b1f4c918badaf53e0156fe2d88aa977b2f94", + "blk.10.ffn_down.weight": "1dd9da8b0d2696ab5531fbca8a29c7d67567620a9d3e5fc2a19ec5d7e4c6cc8a", + "blk.10.ffn_gate.weight": "6e55e7f014edaebda0ac6819a426221d3b025c27312a2e18cc5806f31e3db226", + "blk.10.ffn_up.weight": "d80dde54af5db51241345ee8d64c1972608644f4deeac1e8195dc423bf27474a", + "blk.10.ffn_norm.weight": "f6ca65951d58ae3379eee8247bec34ebd0db05674cc9295593573841b8a55df3", + "blk.10.attn_k.weight": "b58e350bd6b49aba0fba4e4dd6865de3a2a0651ab865dbf2419b627b53ffc187", + "blk.10.attn_output.weight": "6b26a986e12fe66ec286a21d7d5af5eaa1bfe6f2bf502165d270e4497235a54a", + "blk.10.attn_q.weight": "3440e0e5b7e0d1e426424ae5a33f4e057be623249e9035ea12e57dbe5d3893c4", + "blk.10.attn_v.weight": "ebfadcfe14bcd6dee933053df0a67e12e7a196d5cc45728c1ffb2a2daedd5ca2", + "blk.11.attn_norm.weight": "3ed057b9576cd2de84507ef64c7646dc478c651efca4c2024cbe91a4f3fbf0bc", + "blk.11.ffn_down.weight": "8ff1c2487d22f5c499761e4eb721418f141f960160d0bab779595a34e4d68898", + "blk.11.ffn_gate.weight": "9c74e4507c7e45bf39b7cc7402198cd1dd77e3fff8c625b0413acaeb16efeb9f", + "blk.11.ffn_up.weight": "4367158007161d29939e00a322bb6776016e43f648a94f9b08a96a477aae75be", + "blk.11.ffn_norm.weight": "1cc0288c1491072121f4c9a0af20be0e13af49895696a3320e4fcac608768de3", + "blk.11.attn_k.weight": "066f5b3c144fce1366835e1ebf376f768b333b8ae29f5b478c42d1d0c809c855", + "blk.11.attn_output.weight": "e0d9f3d3f2c54aed59c02713ea4fb562799ddbacbe67ca3998dfc887bc44e47b", + "blk.11.attn_q.weight": "28d3ecc8a88cb3815e89a7f7a7d043da7a71f702b337a126e4d3a2ac1cd6370f", + "blk.11.attn_v.weight": "7c5cdef10ee73bca0a3b9f6ece5f0a0155664e0ce3d8de90ccdccfab5545e5e7", + "blk.12.attn_norm.weight": "973b133301a1af760cd7b3a7955371ea0a750808b442deb6adaf7b98482bd0c6", + "blk.12.ffn_down.weight": "d6c87b4b4ca03f75546ddd6a9e7fca720585a309188723c1ace8122438d4b200", + "blk.12.ffn_gate.weight": "2189a6e0cab1540bd05d6089b922aa8fd694be51255654933c165f302a0c955f", + "blk.12.ffn_up.weight": "5affbec19b58d092b9305721e3552481fe2eff51269ea3ed91cda3b9ef84d4df", + "blk.12.ffn_norm.weight": "f650fd42a34e950f758b4a130e7b8b1a712b1dcbede0291bb8edde47aaed0ef6", + "blk.12.attn_k.weight": "59b1e86f10450a7cc188beefc0856d2dcf44e8d7fdd9cd8859c30ec1ebaf24b6", + "blk.12.attn_output.weight": "446b0d36b2f66bd72a2323f4f4e9d85a0f621e9a58872e89a27248d6b1123238", + "blk.12.attn_q.weight": "3ed6bfd39f040301ed99fad882d3e569769d594259f9948445bef0e44ec881fb", + "blk.12.attn_v.weight": "e73652cd5d0029b1931be3ba9d82508f6696dce5a29d085476a54fb7a2ddbabc", + "blk.13.attn_norm.weight": "491b85278c0bd67bd31b9b8a9720902c244bd067e53a4a03641b7c0994782e82", + "blk.13.ffn_down.weight": 
"ad71cc248a85e9ced49307a24a9bfae01d387e979a7689c82ff59998e09741f3", + "blk.13.ffn_gate.weight": "0a55984d53971fab97575ee0ef5882013be7fdecfa76e3fbebb5dc85a07a14d4", + "blk.13.ffn_up.weight": "378b697b35e2e53c0de98e8e29b73d42ae3ec112ec16129aa5997a9e2f3b5943", + "blk.13.ffn_norm.weight": "f8aff2f69ab286210fad45a62b03f8d10b38f96a420d7baadf6b95d7b0b0bcd2", + "blk.13.attn_k.weight": "25ceb841afb1034831bea7f4d6a6c578def2ce4d4c412c780ef147dc9a598360", + "blk.13.attn_output.weight": "a242b322889c6bdaa14b67a7bab593db39df8eea3721638ef639abbb74d482e3", + "blk.13.attn_q.weight": "d80be9945a369439e835c55cfb0e97828b8a66bb7ced534d9059c92487bf20a9", + "blk.13.attn_v.weight": "ac33274cf9b67979d9ecdc967a55175afe0c9c4aeeff6391433cd9840c818706", + "blk.14.attn_norm.weight": "12a1e1091de5b2da12c9e7c0b1c8e6f09ce2a749733cf7d5240445b8e21cd093", + "blk.14.ffn_down.weight": "cfd41965c88266e32bc2dcdadda512499c35519e8686fefb9a7f249ab2291eb5", + "blk.14.ffn_gate.weight": "8dcfe774f07a095c7c6cf0a901c9df70d938bad7b5ba347fbc8f694e7603c0d1", + "blk.14.ffn_up.weight": "c7995577fe4a72ea0fb17c4a7b6b87b959072bbfdd5edacc6c367d43465809ae", + "blk.14.ffn_norm.weight": "81c41ebde41739e7016ffec31d2256217b825dc3cae049a935f5f61a60d22003", + "blk.14.attn_k.weight": "fb708bdebe4384f5c4b479c110028554f4d122f166b8091eda7d8d65e6780eb8", + "blk.14.attn_output.weight": "f5295caf2dfdc60553dcabe17537a80577e8b153c902247daac058df23542514", + "blk.14.attn_q.weight": "c12b7a3601c68c63ab5dc9d2599ebf3f3a10abc2c59d3a2126fffd5818f2763b", + "blk.14.attn_v.weight": "1ce968d9149bf0d5e237d52cc6d6433565b4bbf03252a736262bb00a2b34a687", + "blk.15.attn_norm.weight": "266fd2c36d7dcefc6b6bb7f1c9374c41f2bab5d6c84a063b6f91c4f682dad3c4", + "blk.15.ffn_down.weight": "6154886e9ef0a6cc08ab0d264a35f497e6f0987efdac992ed04e87088bea7801", + "blk.15.ffn_gate.weight": "183d9fd3c1b5657840099053d2fd3f72ad953b1de523296159b7761f20491a76", + "blk.15.ffn_up.weight": "51546d4498842ae2340ee226a0888d5f61e7d2ca4d052dfa06a77b0451242d3d", + "blk.15.ffn_norm.weight": "ef7378091a41a25a5f58bf1bf9d3bc64ea562e7f421e1c232b1f177c30fd3500", + "blk.15.attn_k.weight": "8d556ab8d9639324141774999b6eed0e91d7ee645bf3e7a3dcd200b2e7a00751", + "blk.15.attn_output.weight": "54aa6ba87def7cbe18b0c6ab3aff5c351cb3b6ca4a0d7b2cd5f75a1312991429", + "blk.15.attn_q.weight": "10731b0dc031ea8e0ef37bd7f010e0a78518a10a6df05a8bae48e3148b73ef3e", + "blk.15.attn_v.weight": "cbbe50c2ed7224866d3cf9b489c599f3ec41a4ea1aa3181e9f4e87e1fa0cefec", + "blk.16.attn_norm.weight": "387058eb39d4b28c04cf1368247417f1faeae8ae79d894c9f293457e0eaa00b0", + "blk.16.ffn_down.weight": "2cb26ccee585e933401ad5c82ed36ddacb3289efa0b28f8cf91b020ffbd9c333", + "blk.16.ffn_gate.weight": "d745985efb5bab42304e5d509024631efe35f92f2b2ec4931ead6db97ca9727e", + "blk.16.ffn_up.weight": "7a67bd195e0642828ca36eb7818149bb70c2c25f82de07e2b5807c520daf540e", + "blk.16.ffn_norm.weight": "7cefd061c8182482a89272f8a4e88a954b12609a62716923ca1cb3593b1c1651", + "blk.16.attn_k.weight": "d7968a2de67e755b4533e061aaad1cb62f8882af92dcad67f99d6d5112513439", + "blk.16.attn_output.weight": "9e9ab5788272ca3394ea89eadbce8c86ecc3fd75b7899184d6191c134ad9aae0", + "blk.16.attn_q.weight": "ef81c261b536c1a3a093b33f44cf2d42b86e5aa2d821674f07a0c80e992ed925", + "blk.16.attn_v.weight": "aef38e7958301b4a437cbdd2fbae6197f677b09269ec1eaf63188cd5da428d25", + "blk.17.attn_norm.weight": "28f6b289f1bc3131041e9f791b7a2a3a48baee0dfea27bf7051ebbb7ed364d80", + "blk.17.ffn_down.weight": "1a502829aafc6a9bd6bc81f12573bf8632d5c8c659f0dfb13c8b2411f3b1ec05", + "blk.17.ffn_gate.weight": 
"ddfd8aa0eb98846ebc9afe31366249159f46ae9815199dd70161527ed241ac4d", + "blk.17.ffn_up.weight": "4211a3cc247071bd361b30de2131d02382f552855062bf3b3e004c17992e5d09", + "blk.17.ffn_norm.weight": "647e5fa99a5b0d232af36d15816539f4d27e60a50a341b00aa88bb6e4474f8b9", + "blk.17.attn_k.weight": "d9125ff33a19c502c0f8846433ffc24395048582fc2f463d34a0301a82156f02", + "blk.17.attn_output.weight": "3d64fbb1cfef04444827f37c35fd9ad3413eb2165094d339ef89f00503f09de4", + "blk.17.attn_q.weight": "e5b29424028f578beca385fd82e29f37adedf3037cd51e5889d5a1ffb0428ca7", + "blk.17.attn_v.weight": "1809c5aaf2ac04c5d65539097564ad62796e87d24bb8b9ce5b095561a61d908a", + "blk.18.attn_norm.weight": "99daca58d001c627523d3adfbca1d95f04e590382a326866544d57989d5f4835", + "blk.18.ffn_down.weight": "84f30231ce6ca0f10227541dfc602d6418c1a210386b0c4926ef1656e7d4635c", + "blk.18.ffn_gate.weight": "ca5bbe4468b541740e54f69b9e08fcc8e478c344b70551dab21b1206acfbaadb", + "blk.18.ffn_up.weight": "0b3067b9dded31686dcfdc1e247eae3974a28a61ac59e9862758dbfaad64e8f7", + "blk.18.ffn_norm.weight": "8154a102232dbc0f90ce77ae5c1ff8f26f8b6e4dcf326e9ec1645749669e7960", + "blk.18.attn_k.weight": "25abb26021ccc481471a30e0d4cbeb7e1db29828417ec5136edeb93fecf09ac4", + "blk.18.attn_output.weight": "d87d481d9b046b68efa06ccdd4ed8cbf61e692d61114b75b7fad5ed75f5d87b2", + "blk.18.attn_q.weight": "cc6400379e15766992ff1293be79dc67682c28e9e15155a78109f4b64653b164", + "blk.18.attn_v.weight": "45c75cb1dd496aea3173aafe2575b841dd1d02cbe010b3198099731eb98f531c", + "blk.19.attn_norm.weight": "65389efc75297684773284ef8e5f8789a4504b636c9f33b8a32e0ee42499fa72", + "blk.19.ffn_down.weight": "4eefab7e939f64a17e4a214ca3c77a6fa110d94f677e2d6401086f70fc538b04", + "blk.19.ffn_gate.weight": "f1c0a59cafda66f466ab585b0b8b4861b58abe87a67cea1f6a488492242edfdf", + "blk.19.ffn_up.weight": "c42d045eef588db4a0e56960a57e110e1ff92eb8041107d19899165fd3b90f17", + "blk.19.ffn_norm.weight": "a8f33eda6d5d62ff5f333ad9771783caff556641f4e7df713451385676f441fa", + "blk.19.attn_k.weight": "0bab5d9e9083492bfb05a5a3bb23b79c0e7b99ef6a6644817b4d57d5c453b8a5", + "blk.19.attn_output.weight": "c99c551d70eafad0f7aea98fb6f9251635897168eb3895f76abf0d4ea3b3aa6f", + "blk.19.attn_q.weight": "c98bde95627c3b54c9443813ca50b4e14f518319681db6bbf7b2332ba26e9a60", + "blk.19.attn_v.weight": "ff3a490518cf64904db89ce0dc7d6eb89e870f1440e41883c6b55a221f82de84", + "blk.20.ffn_gate.weight": "761f0e317229cafe9d3754048ab038a0a84e9a287b196ab65f633139f2d29aba", + "blk.20.attn_k.weight": "45d13439b41066d282e8490a726785abf513605f46c79bd0c840f6419d27e790", + "blk.20.attn_output.weight": "a3b958d84b4a097844179b7d55c18fd0e4f319cb15e918c6fde33b68de1bcac6", + "blk.20.attn_q.weight": "127ab8e7d8c3f882874904196a02712bab42e6744fde45871b67350609d19f5e", + "blk.20.attn_v.weight": "5f0ad2d14a8ae42dd3bbeccfb33295687a14055fa92c54bc946249373c1c9f17", + "blk.20.attn_norm.weight": "77300b1755edc8c70089e0f45efa646056b9add7d8568b2324d2f3e62b64971a", + "blk.20.ffn_down.weight": "ab93d0e075b42e9017b701a070d561e698050d90aac4b4b9919256fbe50c3204", + "blk.20.ffn_up.weight": "4fd6628a07acc57a48d1ef83f81b7d7aa0bce569c1160a99d307284f8821322c", + "blk.20.ffn_norm.weight": "2a9e46b9e48e8e55215de56592e1f189530037c1c94a1428e3d6f106c7f26fb2", + "blk.21.attn_norm.weight": "4b3b5912c7bc61eb9da8e47d4651f896e85d9e59c4ecaa65df7acf3c21737298", + "blk.21.ffn_down.weight": "7146f931663d93b8771cd84405cd4802ea6560d0729b0d6d44588203c095bc53", + "blk.21.ffn_gate.weight": "b44ec5d64388fa40b90b3e9976d97a8b6800fa3b97584f32e64b03daffb8601f", + "blk.21.ffn_up.weight": 
"0cf3643fd23c685e17062cd11e116e17ce57a405e5e78953bab94cd62fe48789", + "blk.21.ffn_norm.weight": "4ef2cdb53da166df70b39f3e6b17af51848cfa5ea3c27ad6a1ae2a1bb1da1ce9", + "blk.21.attn_k.weight": "5d40f32a706f670c19972b14176bf660d5b045e3637b110dbf8d7de4ff32101a", + "blk.21.attn_output.weight": "18afaa916752ce16c9653ec0ec7e2fe60be55faa2aa5025d147be184adb75cac", + "blk.21.attn_q.weight": "2621daa5f858931514a4b2f0fe8d81cf9b96f541e6af99bfa7539e9bde8e34ee", + "blk.21.attn_v.weight": "63226dafc54c899bbce4aa49efceeedd8908e94faa613450fdda91f332b62864", + "blk.22.attn_norm.weight": "cf3058daab4d2c04387e7d169d1553bb8e7358eea66285ec067703f6ce62043a", + "blk.22.ffn_down.weight": "6a58d5fd220abdbac6cee7ba048abab794731af318f04982c2506df59413d0b3", + "blk.22.ffn_gate.weight": "d5614535324b03c7b91727a903b2a72f8d07ad17f7aa8b61ea173cf9b895069e", + "blk.22.ffn_up.weight": "ec20da3949566e93f66cabb67f8cd7eab399047ec6ebf5d43edfaf3669b82296", + "blk.22.ffn_norm.weight": "84c82f38f53a649972a44466fc476bf764e064ce18de870291edc302f3700e28", + "blk.22.attn_k.weight": "a3d2ecc37fde7c201176bb8abadf27f0d8ede9679a6034913e03d9db924fda12", + "blk.22.attn_output.weight": "5a3b8bb433f43a387df43dd371bdf80ddfac986dfeaf38e9bac1d7a0ec6628de", + "blk.22.attn_q.weight": "3a875cec661b4859f30a8fd2c866811184b25b68c9e36fe2663d299caf8b59c6", + "blk.22.attn_v.weight": "8717a83b79035058dcfd3ef6f8e5b36e71d77379e5a239e1899eef8766fb7703", + "blk.23.attn_norm.weight": "2b4a68a0a2f023dd646e4755c9bef17c2f631901154afd839edac7ac006ec99c", + "blk.23.ffn_down.weight": "29499b1586c6fc4883c9b7a9c8cf388035146b5aecf90c5c4c8c8e082c71e7d7", + "blk.23.ffn_gate.weight": "7d6554036d21c587b9b556428054f9c15cbef96d24b257f906fcef4ae38bd9c8", + "blk.23.ffn_up.weight": "19761ecb288d6ebd44b681c4535661583b1e19dc29e96d0c007333cd8f00aacf", + "blk.23.ffn_norm.weight": "37dc35500790a4ca33807b39cf7af65065e535dc25b9e94f3ed2759f61887ac9", + "blk.23.attn_k.weight": "717547d00323817b0cb40a72ec5f8cf42ecd1f9e3e42715c2cc5e38f07fffffe", + "blk.23.attn_output.weight": "a24786feb6a905fdf166d7500133757cbe494779d4ebcba9eb03046b319557df", + "blk.23.attn_q.weight": "6a2c4a98f138b928d22136efa163562691d3b4ed526d52d46a2fa2694a8f3965", + "blk.23.attn_v.weight": "c6e6081eb9c38a7fda023085957b460e9ea321e1fff408b38c2b58595c39979c", + "blk.24.attn_norm.weight": "5e6283f891e538670425f3e244b08dc6f96f33dfa4aefa913f8eb17212421850", + "blk.24.ffn_down.weight": "e09eb170f389deea0a4a1cbfdb52c12490768a2c60491b7bef8a4c445e2a08f5", + "blk.24.ffn_gate.weight": "af29d815cf49a38fc2ebd0bf9b2dd9933d023a29f2d766981acb9a1b53f09117", + "blk.24.ffn_up.weight": "36ccd9333426666de9d3088bd4dcdf5b624b09dca9e3a83a22fc0383f2d950fa", + "blk.24.ffn_norm.weight": "a88e1692318826db6ac42582d182e51a3c698c655d0e21e04fa086318832d07b", + "blk.24.attn_k.weight": "f7d61d6d1225289bcc502e3bbb0168b4584add0253218c1b77ac92ccef9a1c2e", + "blk.24.attn_output.weight": "85a1363b3ccc87312094c2195022687c16b0dad7fafb9e80bb4ec474d53c29ac", + "blk.24.attn_q.weight": "53482a2c008f42f4fad779ca323addc3712040149dfc12f782417756388a72bb", + "blk.24.attn_v.weight": "67498272369af7dd10097c73b07f731b565cfc9a559e711cc0d526389e7b44e2", + "blk.25.attn_norm.weight": "98dd617def5cb7825ee4833132ca2da2121245921585e1d9e36b93344adc321b", + "blk.25.ffn_down.weight": "7fd477d6c50aed5f424a878dd284343379cffbee8a34c0b6e55100c8305fa13f", + "blk.25.ffn_gate.weight": "f892c9806c8ec22e8aa746734ac9213428c534921cf161239e1d249fdb5d1ec0", + "blk.25.ffn_up.weight": "528bed14c9bf9762f790525ee40412545221f4321d2a2323fa8e73c58b7643c5", + "blk.25.ffn_norm.weight": 
"ca5831966672e7be6a578feeb631ec3570d3b5afe12860819ccb96e896ffc346", + "blk.25.attn_k.weight": "610d3068cc9b20401f0c3a0efea39a279dd9f564fde19baf3403b2ec2319e4c4", + "blk.25.attn_output.weight": "798aaf702e53b657265ac3b5e6caf3a0ab515bdadfeb1a3a156b4f3bfba76666", + "blk.25.attn_q.weight": "8a7fa25248de83029fb97b51d036a01baebe31fcb4be121ab00dd8b7de209b10", + "blk.25.attn_v.weight": "2a53d5e9f8a1218c66958c6388d3b37400a9af7956c785024ca44bfbc3c7d371", + "blk.26.attn_norm.weight": "5f44fc043481eb0771f3e6d2420bcbcf73140afb9a9feb8eddb6575452acebee", + "blk.26.ffn_down.weight": "944a60a409d0d5b6a851e33c69aca152454b691711a8b96f5bcc488772ab2833", + "blk.26.ffn_gate.weight": "2a0ca4abb3de5593e6693d8be69b63d6d1a639855ac8332a75f520353f030c62", + "blk.26.ffn_up.weight": "0b1df496163f9ac07bf89375d3eb441b51a81d41b47d769a04a61efc18dbe35b", + "blk.26.ffn_norm.weight": "56b8dd046e9be6ea71f7efd80dbd14e7fb1aa020d3cd38e063275f3873fd12f8", + "blk.26.attn_k.weight": "b1dabfabb970e6971c7ea6e53c63cf7ef56341e6a2edd9cf177785cad9af2f9a", + "blk.26.attn_output.weight": "39532c7e836baad164a655fb97ec5114ea4da37ffba9fdea2684f6e4450e6f84", + "blk.26.attn_q.weight": "8f48bf6aaa1252bc149e98af2be1777a5c0d2c3274c6d314171ea9344a41b604", + "blk.26.attn_v.weight": "02fb145f7fd905133750e90571effacadddfd3f4966552dc59982ac3900ab8c4", + "blk.27.attn_norm.weight": "654d168fc3cab716d91261f5719f180b7d697218401633b4878a759f1b5283f2", + "blk.27.ffn_down.weight": "2823272bec3a1c12f02cc4cb24aa4031abd7e9dbe0b02676e2305b21671818f0", + "blk.27.ffn_gate.weight": "b1a1d40cd02f97182cac17a79971d1934ee0daf3aa0bf11303568c636e208a64", + "blk.27.ffn_up.weight": "ed62ec72a020d070e64eb7b50237b32213944727b5b2427f45d989f50df5fb2a", + "blk.27.ffn_norm.weight": "c69649ac65d694b306a905dee8b03b89eec1ed188b1eaaf38f8e29d4b12e38a0", + "blk.27.attn_k.weight": "cc57bbf413f1fd227128dc66efc8590c73634cbd6f96d01ec4878b5e7ca6a925", + "blk.27.attn_output.weight": "cac407ad02361d53207b3c7e25ceab84dcb4347b8087055162e2efe14d11d84a", + "blk.27.attn_q.weight": "0af18e07cee12015761c07c94407024f4f4d77d97bdb24163db0e16669e2cef3", + "blk.27.attn_v.weight": "a1d08fbdfa40af773c5adcf93bd68b78a44ed144e3fc6bbeb8af02e937527eb6", + "blk.28.attn_norm.weight": "f39a51f814512b040a1082143150e4a49ff730f85cef49d7f77fc79d83e91f40", + "blk.28.ffn_down.weight": "74f29ed51055d1c1adb8f0660bbe538a27e016c65650f2d67efc6f1c84fa1b45", + "blk.28.ffn_gate.weight": "ae48bb16487ded6781c60aafc0bf738fb4ae15729952906f247d216592ce249a", + "blk.28.ffn_up.weight": "543009727718ac22f11ee4b17815f68ea6f15ba1f3e7ed5ecdb755cf6417565b", + "blk.28.ffn_norm.weight": "b8f9e54c322079ff20a82b88948cdc2916c22c7db40b9a9ed6d3cbe89efb727e", + "blk.28.attn_k.weight": "55d055ba653b728d6e784f9e013786fed07115c9fdf23367e3941386d5e77db8", + "blk.28.attn_output.weight": "155101c03ddbf18f4fd0694bfc982f33c7bae25c9b087d6f5273c2bfbffcf2c9", + "blk.28.attn_q.weight": "1ed19bfdd22e9c14eca014739982492e9516d411515a8585f65cf754d849e53f", + "blk.28.attn_v.weight": "11ba854dd575c025d37256eee9041f6d1bd2b549a083d6409a09bfc1542913f3", + "blk.29.attn_norm.weight": "02b0bf5e2fcefd11a153cc988c81ba672682e4844fcf6442423e21a0e10d566d", + "blk.29.ffn_down.weight": "594bb692ec2779938721ff4748666ca8370e0e4fe85229503f616438b8884f5f", + "blk.29.ffn_gate.weight": "8bedcf47e91dcb2cf4093de56b048ee411faab6ff472f89ab2c9c113a08e6967", + "blk.29.ffn_up.weight": "e241a547b5fd6dfca8200b8141e21c1c487a96cbc4e5855f181a7ed1be91b642", + "blk.29.ffn_norm.weight": "e63eba5e4c6b288bfd9f15e46e236086456c8b7f1f9c732c0b5de84962a2e7cc", + "blk.29.attn_k.weight": 
"afe5979d5bcf211aebb526620f5974bcb0a2c39c8be71e815575c55d6385e3aa", + "blk.29.attn_output.weight": "9c944ed44b124b014906fc240afd3b90aed56bbd9567f2eddfd5b7a685b3cb48", + "blk.29.attn_q.weight": "e234e08e5c1bd9245a2edc8d63e9933b6b879f97c01392209cad4f55f05f3ada", + "blk.29.attn_v.weight": "5cb8e3e5f954e775c5a5e4de7a9a62b17e9c6931bb0ff0e2f82c4126fd3e1a1c", + "blk.30.attn_norm.weight": "a65483ee51a0b214144ec8a14f28ea5437586e9e12ebe342a57d1f8627ee12af", + "blk.30.ffn_down.weight": "417959da77ceb33ead4271cbb9428b195196173a893c44e52880a7ec61b4856b", + "blk.30.ffn_gate.weight": "a0d503ffcbe45dc927600bb98c9f6082487e65cb577ab545add400d666a87638", + "blk.30.ffn_up.weight": "f8ab957b82ffcd10b21303cb5e866209b6fe95f827b1b94e9a949207952d12c0", + "blk.30.ffn_norm.weight": "210c7ceb0514a9ef27b5d4d1b3aff6dde43f1af0345a050d71097940e0e73e03", + "blk.30.attn_k.weight": "16861b9abcf5a3fe73c93d977ca45a1e6daa65be0fd85c2cff53486ce2033afa", + "blk.30.attn_output.weight": "ca541fb2e57e2257118c35784845b0c731278af8db3036ac53d71aa1681fdbdc", + "blk.30.attn_q.weight": "f7834917748e26bb456b945e230bc926c228e93696bc01fbc2b134bdeeac71a1", + "blk.30.attn_v.weight": "9292783171dbe5eb689d17c9bda11e537f0e9b328fced6986c938d61ed590e81", + "blk.31.ffn_gate.weight": "e4766a04bcd8f937ba883c6a144101e546747804ca66c35c97281d6ccb47b566", + "blk.31.ffn_up.weight": "cc1e666116f7e6b06736db4aa4b81003c583f54f4d9200bfa48842249940e16a", + "blk.31.attn_k.weight": "fc80b57557687504efae7d24265cb7dc39b8f826bb3d897a11783012dbedc44f", + "blk.31.attn_output.weight": "215617f50a1f5d9b2250b82f3652b35a9e9aa0ad9ef2b485d73965a14b2b872a", + "blk.31.attn_q.weight": "274b4f1dfb0bdec28632705677049fb3e327ce6d9e1f3baaad1560439039982f", + "blk.31.attn_v.weight": "e641b8b926f9dfcbbf6b6da1c02555525ac4b1c306d96f20cfbba7d6662c4e56", + "blk.31.attn_norm.weight": "b3243c361d4041ddb892ce6862dd5091f57d87357e3c67e177451b85d8baf34d", + "blk.31.ffn_down.weight": "0a00cd3ecd5e91624a27f9e239b1de425d5ba3cfff82c256a11a4ad434abf3c2", + "blk.31.ffn_norm.weight": "2a0d67ea2bb1303975712243f07273c92fce83baa11b1cd6d8e42e74ea3c810b", + "output.weight": "768615f077fb797967844571c58b94d7c399d884d115be3ab4b0154504cae892", + "output_norm.weight": "7cc5b7ce10e5082000fa00bfa68af8c7c5da218e59e2c41cf2f1499d40ca229e" +} diff --git a/convert/testdata/Meta-Llama-3.1-8B-Instruct.json b/convert/testdata/Meta-Llama-3.1-8B-Instruct.json new file mode 100644 index 0000000..ad7cd20 --- /dev/null +++ b/convert/testdata/Meta-Llama-3.1-8B-Instruct.json @@ -0,0 +1,3 @@ +{ + "rope_freqs.weight": "80fd5efb2f729381785b293a091a268cfeceb0079167f6ece9b07070e662b222" +} diff --git a/convert/testdata/Mistral-7B-Instruct-v0.2.json b/convert/testdata/Mistral-7B-Instruct-v0.2.json new file mode 100644 index 0000000..88d447b --- /dev/null +++ b/convert/testdata/Mistral-7B-Instruct-v0.2.json @@ -0,0 +1,313 @@ +{ + "general.architecture": "llama", + "general.file_type": "1", + "general.quantization_version": "2", + "llama.block_count": "32", + "llama.context_length": "32768", + "llama.embedding_length": "4096", + "llama.feed_forward_length": "14336", + "llama.attention.head_count": "32", + "llama.attention.head_count_kv": "8", + "llama.attention.layer_norm_rms_epsilon": "1e-05", + "llama.rope.dimension_count": "128", + "tokenizer.ggml.model": "llama", + "tokenizer.ggml.add_bos_token": "true", + "tokenizer.ggml.add_eos_token": "false", + "tokenizer.ggml.bos_token_id": "1", + "tokenizer.ggml.eos_token_id": "2", + "tokenizer.ggml.unknown_token_id": "0", + "tokenizer.ggml.scores": 
"e3d3eea80bb41a1213f2d0aa3e8a38581d1f19323be77dbd779c9c7e3b72e676", + "tokenizer.ggml.token_type": "6040635e6bd38d98af06698feb75c1802bad35180ee6ae0a503e38c0f60fd71e", + "tokenizer.ggml.tokens": "604ac4bfbd019e430d7b6cdf18c6c0cd5b967900601f0307f714ec7773aa5ca6", + "token_embd.weight": "cde834ccac5e94324b25cb81b02d27312cac0c551b55a7e1d555d90bf6cb6e81", + "blk.0.attn_k.weight": "458bfdd9715c66e017c2447b1ed3c582963a3111479314e664faad8c914f42be", + "blk.0.attn_norm.weight": "e1fd60b95f713bae7b7e3ca933c64ae6c9cd1e8d808000204bbfdc19f0ba635b", + "blk.0.attn_output.weight": "df13b6a157d9d4f96c53b012b3b9bcd207d0c94144cbd22ae3ec13bb07d6c373", + "blk.0.attn_q.weight": "13b4126b4245bf06c915a93317c42b8174e05053535ec99dc576541e4cec7c25", + "blk.0.attn_v.weight": "5b1781d3a341214511b27eb4e268674ea3ea829dbdf8ae5a6bb89b3c0b33fafd", + "blk.0.ffn_down.weight": "49186f5d8148d316b07458841d13a2e66587f4af69b776188a809591ed9c070d", + "blk.0.ffn_gate.weight": "4397e30ece09136f00f4ff84ff49e5241b765a374deb8c5a12e897e2bf73473e", + "blk.0.ffn_norm.weight": "43260589aac3850a779bca3f9649f793bbfbe5db538361cb743b3830217f8287", + "blk.0.ffn_up.weight": "fd7ac918240a07566f6967527ffca58fcf433a30b78fdd6d84b2136d4ebd9987", + "blk.1.attn_k.weight": "209839566c7d235bdc20565a4766378b6ee8553133a5a3315abe8a85baa80712", + "blk.1.attn_norm.weight": "58c52986f7c69784ba327cb7f350923420782bee17fa39b1fbd13839d4005357", + "blk.1.attn_output.weight": "5067cc628449682665dfcf59b16e58fe2a9d2a81cb099f0fcd42f4f8670c6740", + "blk.1.attn_q.weight": "f410f9f0dd5edc09401af597d02e2a4c727f1502ec3ec3898321617b36c6df6b", + "blk.1.attn_v.weight": "d40fa49e07c102c0644e130e7909eaa93ed0d54e2edddc0759e721d58a4e4f5e", + "blk.1.ffn_down.weight": "594b1eff6ed4defbdd819fabbe2d48764984f08878a860bdb808511d5a25b8db", + "blk.1.ffn_gate.weight": "4cda97541e388a5bb607ce4cc8b3db1da7045830a630e7ba4d17807befcff346", + "blk.1.ffn_norm.weight": "66c13d7481be65b97aa474735ddc9674f33d512ddda76fa6fb45c7464b09f1ed", + "blk.1.ffn_up.weight": "1adc6de288ba4cc1237833ca8b4eb81107149842e38bc452e18e5cfe284338a2", + "blk.2.attn_k.weight": "5420423559f236ab22d85a00849f31e0cc6e9c7dd879de724393d8cd2b379153", + "blk.2.attn_norm.weight": "495fe1ab40cc52aa054ddd4f0c2d2790f4326c8d103296b1b38f3b1060db2a24", + "blk.2.attn_output.weight": "ccb83e7085381f558bfd65588c525ad2671feddcbc3887afb4038ad9c7aac348", + "blk.2.attn_q.weight": "2e8f77478392bc93c2a391f2e0f4a173a952bbab88a7aca099c6ee909726409a", + "blk.2.attn_v.weight": "d64512590f3b7ebbb9e77c2eb97fbda90b00d45c944f2b174f03a2cb11007567", + "blk.2.ffn_down.weight": "1de5084a05dcaa6b1bd926e83517dbe9ebe7fde79235fe56018b3028b1aa6397", + "blk.2.ffn_gate.weight": "cbea526b557f49aad8c976973cf367fcd12175b900f551984f498b9e07e4b7fd", + "blk.2.ffn_norm.weight": "530aa49b10c7eae08899d143409240deb95dae4e1d5bf78cea3b26393cff3ba1", + "blk.2.ffn_up.weight": "13a5fc19b96b4dcc1e9bd01998c8272ebe52034c1933ed123a506b711fae9a5c", + "blk.3.attn_k.weight": "1913b63a73305941d8cdc472e7f101c633d3357a78602eac0a4b49a744261075", + "blk.3.attn_norm.weight": "9c11bed5ab41f4adbfdae4ead65b525c8f19443e656a8c61ba412a4e1ad1193b", + "blk.3.attn_output.weight": "bb0b42c1d34779c5943272ed71f1dbb31ad8edd75f8bcd5c868f88505ac3a610", + "blk.3.attn_q.weight": "3461a1fe4e49f5319ea047cae98ccdb46528a3ec23831183fe87610b48c94948", + "blk.3.attn_v.weight": "82aa30be6a61526a41fb79bb28a2617416f5909f0477aa9e95e16be9370fcb38", + "blk.3.ffn_down.weight": "68521011ae03f5e3b0966127111afa8ee9f2eaeeef8d3a0b86b633e0332e9fbf", + "blk.3.ffn_gate.weight": 
"1e89e26338fd364bb679695968c65106382f15ad55c95cbb5ec9bdfeb766f432", + "blk.3.ffn_norm.weight": "c81932529a5a8c417c27b888dbe95fff8b447c2ea5f6f560444ec5d50b93832c", + "blk.3.ffn_up.weight": "305021735afd8669afefd713f56137248d5e817e60471a112ad06b7fa07ffe88", + "blk.4.attn_k.weight": "cc26ba5c5c28082a79e6abfe61186029e80b145252ca6a7924c437f0bcf2d51b", + "blk.4.attn_norm.weight": "302d251fdcc91f7468cf33f80b49484251d8917d7018ad264ab3a85c8ecf9ddd", + "blk.4.attn_output.weight": "a012f5bee3520cd4ce51f0076c132ebc3653309f304032ad051aa308f55f36de", + "blk.4.attn_q.weight": "3c8d607e447f5ef21e73af71e3c0d32fae16f91f31faae34ff06912cf9cb68fa", + "blk.4.attn_v.weight": "49f6c81a634ce46d71c2350206ecbd231b1732af96e4e4e67693c41a07e007d8", + "blk.4.ffn_down.weight": "e89504f311a4a34dc819a67b761022f14d71c43df3ead4f892c87aaa8e9f0adf", + "blk.4.ffn_gate.weight": "18b22f079a2fbaefe3572eec61fdcd996fd747724e2f0ff4f08cfcb43eb7bfb6", + "blk.4.ffn_norm.weight": "22415a492c168a0878912b05c854a631228b01c3ea8842e1d75989ec46c18a65", + "blk.4.ffn_up.weight": "f57379eae2874d8853f14ddf0f0fcc4ff1338574d5ed5d7e88331d5fb84f5642", + "blk.5.attn_k.weight": "d627af853c40bddf9762ce3988008c1ff17f2686fa8f73a0b5da38010147c316", + "blk.5.attn_norm.weight": "9ce01092c7f7f1c3ef72d6b794da12d77aa1f6a24fb96ba1b9bd5a0bcc3e2443", + "blk.5.attn_output.weight": "0388da8064c4b6b795ce2d8079e8a36535e82b2c9cf794e38ce8ae460aae726d", + "blk.5.attn_q.weight": "039b7ce1c909761fdf475c06cf14cabe5a90199282c89e4dcf460e95a4b6275d", + "blk.5.attn_v.weight": "c47bfd8d2496bdb6e00e03b903e15fd0ee806a515094ec257e43cc433147ab7e", + "blk.5.ffn_down.weight": "1d62e6708974bae318cbf00a8bf621d9ba0537e549ce4710a536520a8d14168e", + "blk.5.ffn_gate.weight": "8b42b1b11c92db19985094cbb50434e3a7c9cfea71ee6f21ea79eae7c49284a5", + "blk.5.ffn_norm.weight": "e0bc520f1505e687ec391d632a381d38d8ebcdec19f614a11a2000ab573e8b7b", + "blk.5.ffn_up.weight": "8cdcd17d2ea89bb9ab902dbc6bf3f827fa4ee029c6bf19eecbdefd146d8b6f2f", + "blk.6.attn_k.weight": "5dc6bcff89794d1756bf57ec665b58622d9352130d31082a6c66e1a079f99932", + "blk.6.attn_norm.weight": "13b26008abe0f119b5104b9d78ebd5e797d3cdd68122b93d73a3b4831a54d085", + "blk.6.attn_output.weight": "f5a49917ea70c3fb311ccfffbfafa63ab18416a5d55e5429b70ce8bfba57c075", + "blk.6.attn_q.weight": "d9c2f652c87dbd09ec3822e12876648fa32e86553ac25afab723b1cd9f8cef90", + "blk.6.attn_v.weight": "5ecc5fe67609a35151011cb526f45c56fc0a999079ae0ff37c755ca03c68c555", + "blk.6.ffn_down.weight": "0ec125ae0ecb2d9277fdb1b04f17efee94e37d0ae37311057c212ca2db3fe6d1", + "blk.6.ffn_gate.weight": "fa4d6d38355ee8aa3b80b476d65ae7e343c9b7770d7b097fc848ee8a6e091d1f", + "blk.6.ffn_norm.weight": "30e8f7defc627532e1739dc76d31223d45767391a431f925b63dabe334b0f392", + "blk.6.ffn_up.weight": "6b97cc32b290fa9087806b5d65aa6dc1760737730c8c71394cc4f30c2157f9ab", + "blk.7.attn_k.weight": "0231cb127cb7c3714cd72b8f39343891d7715a9bab2237ade9e7bc5f4ed2e68a", + "blk.7.attn_norm.weight": "7c3187f07eead7d219d98ab2daf87905e88d5f1ace109b6f5fa55dce3914981f", + "blk.7.attn_output.weight": "2f30ad972c284ae7c8eb0482053433495ebe8fe9c5ee2c28b4bc4ed1f33050fe", + "blk.7.attn_q.weight": "3a2b4b8d61cc9956d304fa9f82a9e65b4bb9fda2196670b16df7e0d8c43eff2c", + "blk.7.attn_v.weight": "d2aab97d0dcf0f61dd2f32848f7a8a99c423a4948a660a660a03a546972b8db8", + "blk.7.ffn_down.weight": "2270d520468c5549cd30023ff9c452a277058310104c4239a616373fc5a94387", + "blk.7.ffn_gate.weight": "4134a3ef71b3eac8f76b6f1a2e58625b3bae48081f175994bc3ed7d8b0d4f2d0", + "blk.7.ffn_norm.weight": 
"42df4abd4b8769b16f3930068f96960af1b061f1aeb7505384f272233b2badff", + "blk.7.ffn_up.weight": "c920549054ec16ff8c73a72f5d837cf4e11885e44db57c1c1c584c18fbd7a9a5", + "blk.8.attn_k.weight": "01c609bd3bf31ce65688f1f640ee413740e821330134d4ed1877a3065d1527d5", + "blk.8.attn_norm.weight": "48857411f769b00290f4e4f2e593e092781fdc2503f80c1e3eeda1b85a20f74d", + "blk.8.attn_output.weight": "90fb273f8df83744554bd59236515c16c5a5a698ca3fbedc17cc89ddcee354ff", + "blk.8.attn_q.weight": "ade617ac4653c7f00593dbb51837a468afef20a14eaab3780fb96ac3d6714369", + "blk.8.attn_v.weight": "c2c37496494864fee5c527d1fe1f88529d31c73f9cbd02ef9b2e9b23611ea50f", + "blk.8.ffn_down.weight": "2da58572e9ad79087c03cbb0c23c9ef69f93ec221fd5fe4ed92fb93871d23ffa", + "blk.8.ffn_gate.weight": "4483294e628edaa4901708e73e92c917bdd93b780fa01aa74aed57166f2bbf0a", + "blk.8.ffn_norm.weight": "c0cbb7a4f8123b62f0c4652a687f3b394802bc32870dc446eefb709e42043a7f", + "blk.8.ffn_up.weight": "9eaf8a2060cb9224cd585997cd671866c4051ad885c2c6d9fdc7056c2a5c0d89", + "blk.9.attn_k.weight": "5dd36c45fbc9c50fd35c36cd75576288506971eac5c5311d4f5c16ef60099645", + "blk.9.attn_norm.weight": "3c8ca64f2f75ed7c8fc1da010c23be787648139a96ca0ef3ad10be7b14942b8d", + "blk.9.attn_output.weight": "6277e1f833024f53c409be919ec76d34464a78b278c8f9dbf79e777746e3b995", + "blk.9.attn_q.weight": "87352b70d9e328c2d51d59090cf5ea5a046529864a890d0bc8986447a0a5c006", + "blk.9.attn_v.weight": "2efdf01161d7a82a9117cc2d87d37dba5ffefcf730781cb94fcc95130e48ff9e", + "blk.9.ffn_down.weight": "e7658a2ca984961c7ace16acb679387bedb1fef656b5330bbbf588db19673a75", + "blk.9.ffn_gate.weight": "773cd330d4ff5d64be8af00adf2e2722fae4e33fc26bb9d03549f6f4b3b0fe57", + "blk.9.ffn_norm.weight": "c8b86cd5c43b332f72060b807091c33a258e5dac01358ff4733b916cd34c9c97", + "blk.9.ffn_up.weight": "d8cc3bcff18bd46124ba2aa7caacc71220b44eeef6fccb993b4c6cb53e8f2c3a", + "blk.10.attn_k.weight": "964bdf3b4e77b915a216f750ff7b0f2eb1dd6bfa071358aef21010b90111044d", + "blk.10.attn_norm.weight": "59ed411d91d14775764eb514acb0895a75a10cbbfbc1c15d453bc50f8046cb7f", + "blk.10.attn_output.weight": "4d35a2a44cfe4ac0a83fd3ab0dcf1f5a0bf54cdb3b7be9fc353ed32c8a3eb81c", + "blk.10.attn_q.weight": "defff5339450dd881ac352f5c459293f39e07b9619ebd10ed632d79a3f310278", + "blk.10.attn_v.weight": "b9803e8d6a54acea58f662d4c0a5c8ebdf986676de7dfe12d4b288937881ce93", + "blk.10.ffn_down.weight": "eba856be64e4be20b92fb4639a783454dd92427250759df92a337e39f1971c08", + "blk.10.ffn_gate.weight": "2d5c509b066584db4de3632b01234e86edcde35409c5ebce18957dc80fe465e3", + "blk.10.ffn_norm.weight": "ecb9a8679945ff0273856624ce435dd250ffe5a440ea0861a5c84f0e4c44d2c6", + "blk.10.ffn_up.weight": "e76ec7e993f399af02958778c643aa78368e3067846714165eb5aba9d5f547f5", + "blk.11.attn_k.weight": "29c6d1f34bd3ba2f0904e57b32a5bf8dcb2834d439159a33edf234ce0b775677", + "blk.11.attn_norm.weight": "b5817b275149cd2abe18a6a10e19854605fc58fd364666744362ceee8cfe49f4", + "blk.11.attn_output.weight": "1e05653220e237cbe0cc770033e183c9a0eed5680510997409b16186c6691950", + "blk.11.attn_q.weight": "03db725ae669151e4d536e50285b3b047ad097f52475df208ed3e790e31a44be", + "blk.11.attn_v.weight": "27cdf1d4e971326c451a4615a0b79a8c7fe9508f9b76c0d52fa01971fc7eb403", + "blk.11.ffn_down.weight": "176938cd7c2966094f614cace8ba568b10532e45a0d438f80eccd19b6c2a7f87", + "blk.11.ffn_gate.weight": "9782339915dd6fa70013628a01524ee1d01ad8beab04068da7ac6a5ee7603a60", + "blk.11.ffn_norm.weight": "8245f6391e3be97811c0ff27f0d8f484ecc82a468a837c893f059745bfcd95eb", + "blk.11.ffn_up.weight": 
"15616ddde096d0d25e906375c548b6de4bd5576d1f6b68eefdc29f14e183af42", + "blk.12.attn_k.weight": "66dd21604993edd1b1fe547bcaa06f5bb7e31c9204902d147a227e4badf7feec", + "blk.12.attn_norm.weight": "23a69f85dd8a0904b9839cc5d0afcda299b74e82ae2642106224a1c820f2b761", + "blk.12.attn_output.weight": "4a98d132e376beb274a39d4ea9b6a1b870ad5c66625439d7ff6f45c229c3ca04", + "blk.12.attn_q.weight": "1c6c309d63afcfde32fe37257e300a78e25d01117e33490801107c0e75d1ea66", + "blk.12.attn_v.weight": "723d9e4ebe4e2b1974afa01d8f512b52933698fa36717dd47b37b07760c50a10", + "blk.12.ffn_down.weight": "00e0fb09e1f1fbbf3803f1dee373eaae7a93756b6e13063ab77f9927bc6f996a", + "blk.12.ffn_gate.weight": "89159f7f97aefb1e100107e3ac2d694e1008ad873f79bb953d60c2c1bb22724d", + "blk.12.ffn_norm.weight": "5f70aebd0e43a39d6373d8658cc670c13aadd7818831d3d84f761d5f688442f0", + "blk.12.ffn_up.weight": "faec21b446f061eb4dca561a3180712724347b77a71eb312e7afe9be9e89fa04", + "blk.13.attn_k.weight": "3d440825d19eac3b1753b34d94fee2b3a3cb6636c10b2703ffcf688d3c1eded3", + "blk.13.attn_norm.weight": "47b575e57e410738ad13fd3c74bb49c06b3d31030910834ece509cd1a5c6d9be", + "blk.13.attn_output.weight": "05436d8e613f4475741c1798a7c371b53d61b229507fa04fe23c504ba1f0e12a", + "blk.13.attn_q.weight": "002b5024ce520da41256e3ded5cdc60e5ae07ad9b202cb19d76ab511efd02b1b", + "blk.13.attn_v.weight": "c1f2d6763587c50312cee0d7140fa2c7ee326f5b172bc99b2d8946e08329cabd", + "blk.13.ffn_down.weight": "b5c4e0d8a3ff96cd76a135e415b89f02d28c28f7f3c16a36af31ef0ab8773da5", + "blk.13.ffn_gate.weight": "ae06e9e3d2e1f64c7ad23a4009dc904c2eccd7241f9f91c4974ab2504f116be0", + "blk.13.ffn_norm.weight": "e44a22321bcbcb4a3c345b504e939e8071370f54a8cd702fabdb40b97e0d7683", + "blk.13.ffn_up.weight": "7e6f366d538e21ad431264b12c011892d0be9dfe4c4da9f730af677f920641ba", + "blk.14.attn_k.weight": "95492d6417952ec24b2cab87bceb750fc7e95ac6b1944fc328a3852d980164be", + "blk.14.attn_norm.weight": "6b7b09e1c51addcdbb160ea59edf032531421c520ec5645fe1ff9ca4180cef54", + "blk.14.attn_output.weight": "75887474e4d72c218e6ab0f69f1bf3ec3dc414d51b36fc59df00cdb23421bb6a", + "blk.14.attn_q.weight": "940e33f76e48c21215d19e8a21234c8246d4d084381a7d9806aecb24b071d5bd", + "blk.14.attn_v.weight": "c58601cf5a9833f80f7f9a5b2656e8eab5eb133211446ebd48f8be15fed4ebb9", + "blk.14.ffn_down.weight": "f9f886e7f9b2a54d717b08947a25a0a93e8c2a5b8bcd5a907c06817c8ee3ac11", + "blk.14.ffn_gate.weight": "727ed0ee68594a3f59d704ed3240b6929f083b9c36650fb848d182315737245c", + "blk.14.ffn_norm.weight": "bd2471008ff1b2bae9aa26bea019393fb2bbc5b9493b8cec3ebd2c280fca24ca", + "blk.14.ffn_up.weight": "b006446769f51e4f93b503c4727deae897bc1fc7f4fad49f85024b63c4548d38", + "blk.15.attn_k.weight": "23bb70f9035356624039547a603e46be7d1e4403616eafc2451cc09c5373d522", + "blk.15.attn_norm.weight": "718cb371ca052eeb3bfac6ac506abb887df125271821fd171797a7f2d8dd6313", + "blk.15.attn_output.weight": "c76a2695a204b43a8e5acfa5720590b5d449a9ad9e082cbe3e80fab5903ea16a", + "blk.15.attn_q.weight": "2b3e4037b9e91bdd26d6e8d904cf39f948192dcf09bb6445cb55ca058d4f4626", + "blk.15.attn_v.weight": "7c15e89b6acafc8619e86aa9d412f5893ab17843ff2cfaf40eea9637b24910c6", + "blk.15.ffn_down.weight": "e16fd4bdc6d1c1209c6b633454df4992870c8cefb2cb0e8c92a7e489e9fb5d19", + "blk.15.ffn_gate.weight": "95a46bea366c260337c537fde06b4cbeaeec52484a69c3390bb1d178eb0525c9", + "blk.15.ffn_norm.weight": "37730293f704da265dc6d1896b3be00c39c0a41dab07f573af39dc30a481d623", + "blk.15.ffn_up.weight": "ba74a199da2d0875d7410824238c4ffafbda3993568812284a72b8800df91f15", + "blk.16.attn_k.weight": 
"f58f79a2a91c9a763adefce0c53a71eb5ce6bd8442f4af554b04b58083bff27e", + "blk.16.attn_norm.weight": "0c16e41b95e81978e0e0e3b338e2afe2d297426578cacee94de15df74e94eaad", + "blk.16.attn_output.weight": "ead22fc337514e4add49aee19720008558e52090466866e849671953a1fccba4", + "blk.16.attn_q.weight": "ef59c4e8fe8918c1add43d7e9c6fb3ef799dd3e1bdd731ec7b6a4a6f97c86048", + "blk.16.attn_v.weight": "902e6b84c2b64241470b13e6f412f859f66b4b223bcfb9c15d5cb1106b07ef3b", + "blk.16.ffn_down.weight": "2ad6e9eb4d8372c32a554395d460d17cfb02d6dbcb757cc962b6bfa36db4f5ee", + "blk.16.ffn_gate.weight": "825b2d50fcce3dbe6a5d8d8a50a95466f83ca4a10343efe67894c20b4628fb15", + "blk.16.ffn_norm.weight": "3bf6ac90befb0e17e077c8ea9454a8485a30f89f2d761ec7751b60c90aed1af9", + "blk.16.ffn_up.weight": "9fbdd08739b32411f5ab0252174d386bab19eb0b17884862f760429b7d41d78c", + "blk.17.attn_k.weight": "4033398718bf3674830ed1b73071ed8482b6dd4ef27f31a6c5fbb998321b6c07", + "blk.17.attn_norm.weight": "714f2e8ac9592966a0f1c02ee979eee8f84586405b992e8ee9543e840199ffa1", + "blk.17.attn_output.weight": "b6bbb618597d767b8f535117be68f92911e4a71d4eb4d8b5d943444151445ece", + "blk.17.attn_q.weight": "b84a0dc00ceb515faa2628125dcec502eed923077b21cfe900a4ff16c2e5f9ed", + "blk.17.attn_v.weight": "4387c7d6a17da9cc7a6bca8f4a75618b20407d570792056283a8e93b6ec65f18", + "blk.17.ffn_down.weight": "47db95c6f1e12b399c3eaf9ddba261782dd71173dd163b52af96541cf87b5196", + "blk.17.ffn_gate.weight": "59abaded0aedfd12f01df81f7a811e84db6a227f51b60abe9a247ca726e87392", + "blk.17.ffn_norm.weight": "b7e86445be5c7b722e01ddb98d5c7527ca86cb827ce0354f2c269e0f2558751e", + "blk.17.ffn_up.weight": "8e31c293bac649d2f60da4b3fc4a3acdce1111ec6058d8805eeeb242443011de", + "blk.18.attn_k.weight": "5ce762ab7b032511c131df81093b587871718c7097f79d8e07d707571f18a47b", + "blk.18.attn_norm.weight": "1f52cdc7af1f4dc1f0ef6ad1ad02e18cda32133654e57cfa9c72ada9c0b1d995", + "blk.18.attn_output.weight": "6486957f30bf8a88516e25772c6650f98b13923f490a2865a8752e36439d1cfa", + "blk.18.attn_q.weight": "93621c8abf69d2ca29c5207180eb628fb2b544d89de6c4a7fb0699be95534899", + "blk.18.attn_v.weight": "11604083b5a74828ac1d226af015ad5dc0215a1fdca44fa7131c2163c02d8156", + "blk.18.ffn_down.weight": "8f9997feb94385f106915df810239c9753b31efda2bf14bdf18a9fbbeec8233d", + "blk.18.ffn_gate.weight": "427c213b3a4e94af703429daf2f65766f70424d8230c123e7e712a18bceb5ecb", + "blk.18.ffn_norm.weight": "c45d305c4ea6a54013ba112f12dafaade064a32cf01317373464a3618d8ba44a", + "blk.18.ffn_up.weight": "a2811f2e73ac9eb9cce91a21a454e84e230a155244e2cd73f2c12aad3c9b8cfd", + "blk.19.attn_k.weight": "b2daed159925eac58c291e2f1e2000beed21002b03c9e1bc7e7a52e22240666c", + "blk.19.attn_norm.weight": "6307306ede2ab5bffa1bcac3f8b139354678c0376b1d9f5530c1fcb4268cfeb4", + "blk.19.attn_output.weight": "ebb98218b2a9c84d3fb6baeb02c5df264b7ab80d994d1098ba1cd47aa398effe", + "blk.19.attn_q.weight": "4f10df2ad09177e7528e9456039b670d07db22940a49417101b725d239c16724", + "blk.19.attn_v.weight": "30f1efc5114badaeaafa91fa466dc7fa14b1616db433c6f563ab851f7333a5dd", + "blk.19.ffn_down.weight": "be5ec7fe6b48855cd0015b0e430d1b70c620de87a7ff188c7c1afef546d7b6bd", + "blk.19.ffn_gate.weight": "10dffea4213881f8a9b583ee0fd370e033756d32255ed15053f794375b9400e9", + "blk.19.ffn_norm.weight": "e75cd24ade45dca78fdb0cbcaaa2d4a17d83a5a73dcc94ce0ec2d68fbdb2a881", + "blk.19.ffn_up.weight": "63e81bdb951410ffa81bcfba1b94a679ec9ebae59cd1623ce2651ed5d4c78bfd", + "blk.20.attn_k.weight": "c2fc5ad39e9bdd45e73c6e54aecc474388d944c4be1ee1921b7fcd035bad02e0", + "blk.20.attn_norm.weight": 
"aaa9169171937bdce20c1f057e94e9252f221cabacf1ced12e11b9586f23d308", + "blk.20.attn_output.weight": "a9f4fb496e4bc053e3f6cf2e72e22d4cd2b545ef6c32f7e782c2ef6ebcc21d4b", + "blk.20.attn_q.weight": "5a07ac619ed251494170b213921ef3fcc4c2712839da262516d9d5b8ea1ff185", + "blk.20.attn_v.weight": "d6689473105d241eacb17f09f06000ee237336916cf5ec4f48271c5b41bcb8e7", + "blk.20.ffn_down.weight": "74be38db51df736f26ede7c6b52ea787e385f181cb66231e2cced4556a25c9b8", + "blk.20.ffn_gate.weight": "ea91e06dc3d051c0ba0243b5a8bb40edbf254eadfb54fda7247e05cfdd88cbe2", + "blk.20.ffn_norm.weight": "5fbd357b3d6f44a7a91e8a4fc246b24303891b7957e0f3c32818ae5dc16ddd8d", + "blk.20.ffn_up.weight": "fe3290333e056af4ed12942ac72aeba97a6b562e2db05e79cd35dd07eab5b101", + "blk.21.attn_k.weight": "201ec6ee95f06ea5eb80fe86fd07bd016d3ae9ab6abd25d631834414e14a010e", + "blk.21.attn_norm.weight": "ea8154f93e06485828475a00b98cc397ac84768dd70e06ecc0c075b5712d7276", + "blk.21.attn_output.weight": "9f8af74d531478fd304723fd8e4e01578db598441b80dc7c960cb801dbbc501e", + "blk.21.attn_q.weight": "277de9953a8d3cff894ffd06c15ad0ee1407e319df0c1a693d4f45fa9c74ac7f", + "blk.21.attn_v.weight": "6bfdc16cfb898909b7788ddd39dd04b928f31d6732772195d53c558004638dca", + "blk.21.ffn_down.weight": "173877146cb94801157796ee9e5eecf3f46acb3b5e797f90b83a3fc22395eb30", + "blk.21.ffn_gate.weight": "53146713e2ca1be80496024077a028f6b6d749b02e71003c349e113b436f48f4", + "blk.21.ffn_norm.weight": "b28b97e18ab20a5c553ba422f7d7f6014f5902f1d62a69abd20d9fe19a5f9462", + "blk.21.ffn_up.weight": "5c39d0ac4d602b8ec8909dade93b2efcd6b6d9d84a19b252d76bb66dcfaab87c", + "blk.22.attn_k.weight": "01f26272c82917a87a3ccf922fa1d521a952b05de878241b7efe3525b617ac87", + "blk.22.attn_norm.weight": "5ffc96249d8873b506e9eb7158bdfd07fa1429e53c1951430ca7505d25f11c76", + "blk.22.attn_output.weight": "9c2201569358f720244b9c9497e4da02585a167b1414c8a506b85ad75ba990d0", + "blk.22.attn_q.weight": "906036eb4ddf027f6d920f9356a6a2a5e529b96f4e1231a0496d46b4434a5842", + "blk.22.attn_v.weight": "30ede8b0d166003a4b8a81fc99437f557719fc36e5c4dd510c9f161f36a47e73", + "blk.22.ffn_down.weight": "d04c164beabab30e1837b843e18852260efccfbb9d96a34ddd816e6fb3ba23c5", + "blk.22.ffn_gate.weight": "19c889db6b19179f0a62d5981a1506592c65de83760d67afbe00d202202750a8", + "blk.22.ffn_norm.weight": "4885eff2d851b32dbd306bd632c725857e6d164f0fa8b3d5857e572e6ef98ee9", + "blk.22.ffn_up.weight": "365594d8db8e95cf87cc33ac23947942dc326110175cc8ec5a07b5c7059089a7", + "blk.23.attn_k.weight": "badfea1569da0fc6ab817c5727ca3a69b07d9cfd622fb8be5e66678d5b3f7ae2", + "blk.23.attn_norm.weight": "8968f78a379ac3ca5458b4ed4251e8d9112aca6d6dd1ef6440b4bb0b380375a4", + "blk.23.attn_output.weight": "93e43393c03956287b1fe31e9735ff1cfe84f4ae56b83dbaebe96275e4e11831", + "blk.23.attn_q.weight": "aaff73c725a8700ae66bf26ac8869dfe96738eff23a8ff340de2ab53400a5795", + "blk.23.attn_v.weight": "3a86a8dcf14a746ed1411f5a7e634064bc4dfd6511c24cfeccfb2c9ebb6b4101", + "blk.23.ffn_down.weight": "d4da6f37bd7ef69bb203f7b0dd59f50bce37432c70627e6cf274ab81548af5cf", + "blk.23.ffn_gate.weight": "5b6072936c4a693923bb4e3d1473fd45545cb02fc07799aca458ef0449a04061", + "blk.23.ffn_norm.weight": "cd76e37025f84773180298ddb15e0d4ba9cfc7d832e19c791049daa47c6d9c10", + "blk.23.ffn_up.weight": "cde43b99b83124a13b2e4753d12674b3a61dfb34c04703007ced3e8e2aee1801", + "blk.24.attn_k.weight": "457379edc4cce4cbbe107385079019bc922264fdfc7bd1d1ae84343a81460c66", + "blk.24.attn_norm.weight": "0ce0dfab2edeede5da419fa7833db78e36222cf25c358d08f3ec664310f031fb", + "blk.24.attn_output.weight": 
"0cf91c2fd40c204d2fd4b9c85b69281e5ad4ea8442972fcd44b5fc8e835ffdf8", + "blk.24.attn_q.weight": "87ede30c09eafec6a4e6285674c1bc4637140b168b2da4ed34f36fdb6e176cc9", + "blk.24.attn_v.weight": "4c0b078b2798ca35d6d2c2258fe499820d2bc88700654ba4016e4b028f563590", + "blk.24.ffn_down.weight": "cdb8540c32b1ab988f984484928d39f6841f2131c1cebe90ad9456737fccbcaf", + "blk.24.ffn_gate.weight": "da2e0e913648b5526bd2bbb344038dd067639343aed3b413662b064b0db7556e", + "blk.24.ffn_norm.weight": "8940bd781c610d75eb2be63cfc8d869a3af05e53c963dc7fd4c6f653df5a80ab", + "blk.24.ffn_up.weight": "90cbac2a58801abe11ed6c24560aa4acb949f79429f2aa8ff129ac05868bb87d", + "blk.25.attn_k.weight": "90607131e36998e990ce718ad05cbecd1bcaed010931401ce6baa3b0d93ebce6", + "blk.25.attn_norm.weight": "fbf679c85656c04a6cf8fedd5412c1ace22960e6c2d47f2d43997827811fbb97", + "blk.25.attn_output.weight": "08412724ee7a2086514406e6f68fb9f622e10bac25b0c373b294709f4b09bd2b", + "blk.25.attn_q.weight": "9c1238e98a2747654a0d4371d3e7ea8b979867f609dc42482544f25591e85c7f", + "blk.25.attn_v.weight": "a57796a535c6cb09581cbafd6a91dc14adc8cca2a2465a7ffd0aec546cd84074", + "blk.25.ffn_down.weight": "f7e34e8a6391b480da08b52640613ccadce268373934b409759743a1735b74d6", + "blk.25.ffn_gate.weight": "b8d0b2f4612678b5ce42bd4a683f8024514b75fb5ebf6b22c600811e95582ee4", + "blk.25.ffn_norm.weight": "cde1fdba2369d315f3c6940a997c471ec891924e642505db580d732763bd7b75", + "blk.25.ffn_up.weight": "72e700c32ac8b9c47559c2222e45888a480b527ea512075423c5dc01678e2bb3", + "blk.26.attn_k.weight": "6ac83b3414ae75bf3a9055c32e49d2c40fe611ab21f8444f03d2f465d18122c9", + "blk.26.attn_norm.weight": "55f9d6dc9d75973dc75136ecb9d991b4398097ac133070873fb96ec76a6f60bc", + "blk.26.attn_output.weight": "ebc4fcbd15b33263e50ed2ad45740867cce15bc90e1216623babcb1820734509", + "blk.26.attn_q.weight": "080f057521073e412936fe3fee64fd574c8128fa4a148b879d3e598fe4954581", + "blk.26.attn_v.weight": "0fa2830d6746487ac91b243716e4302361f891e4e008eddd14abec47c7809d5e", + "blk.26.ffn_down.weight": "cb2ab8af1653adc57111ada49d2825c6995e338c8208455b92de10e580f60f31", + "blk.26.ffn_gate.weight": "231ce30966086bce2dc0e0afd34a22a1958cfda7a57c41b3b8e9444c5dfde8a6", + "blk.26.ffn_norm.weight": "35d959d25d17b00617590f5d5831bf705c385c51e46297a14375a700effca6af", + "blk.26.ffn_up.weight": "367680c8d332538b467d1ef87cfeb36cc5c6af564c5023c5fb50e728e3438287", + "blk.27.attn_k.weight": "0bfcb351c6d17aeac5b55a915074fbdf00f11c4bda98babb196ac8804805746b", + "blk.27.attn_norm.weight": "5d598a88c2e75ba59dd7ba4fee940bdec92d72038f1286536d2dfb71d008a09c", + "blk.27.attn_output.weight": "23a9da7347336479f6a10ded14cb3f46e06b5bd56dc4b0fbc526c688552ec840", + "blk.27.attn_q.weight": "b83319dba9055f069208e9c9d66da08bc6874f23e575288fcd81697d1777aa54", + "blk.27.attn_v.weight": "36ed34ccb2f36fdf16b2c2dd225a98ea6b7b0e376e7791191136ccd7bd7a4add", + "blk.27.ffn_down.weight": "5488e1d3a58c71b5e9ddda430540b4776b268cfe1457cbc1c2622dedd9e4526e", + "blk.27.ffn_gate.weight": "4ff48011ee0bac39af704849d9132a2410392c87a509c684f2062f6b76b498fb", + "blk.27.ffn_norm.weight": "32afe99675983da3de2961d1b5ca41c98970a356823597fe29e91f6e86abf0e8", + "blk.27.ffn_up.weight": "1eae3088a75629571fdbf6a20f141bc2bb2ed3f5ba2b9fd1d949f80695e442a1", + "blk.28.attn_k.weight": "c4e80af714962d6f9040d2c09f316f4a1cbc3a2e994e19902d7c653cf3c73dba", + "blk.28.attn_norm.weight": "c1ecf85dedc1c83d5d402bb7c94fb8b9c11f1a3e5f64e7680f80912d4a560794", + "blk.28.attn_output.weight": "72ba47c061b21f5ebc5213a455eaf6fc49c8f8e04ff9ce37e6ed4921b629161d", + "blk.28.attn_q.weight": 
"c4abc47234307f44b8ca789aa6668e298158fa4b459b2c1e84bd581806591cc1", + "blk.28.attn_v.weight": "aeba950799d4950e491ad0fcbe30334e39b8975177990a2cb339031c45ac153c", + "blk.28.ffn_down.weight": "4e84ce382a37b994fb8608df451a60040559e3f4f3241c3b3cb8989a3ed50d83", + "blk.28.ffn_gate.weight": "04df157acdc8e8534ad60acc2d2a4dd3a7a6610f6382535ec728994fa6f83f83", + "blk.28.ffn_norm.weight": "4d0386dae2bd1c1a9d0f9730718333e3a486c3bc6a5c5d482193c75d39832c80", + "blk.28.ffn_up.weight": "fec60bb0a3daf182a14bd8311fe6dd1e3fd020c5fc273e2549cdb1a2d6b79b05", + "blk.29.attn_k.weight": "b0532a263aa5a4e2a7a80adc83fc5dec974493bd18da7f953e7ebfc3f3a19aae", + "blk.29.attn_norm.weight": "593fc3b4000c35b7a59dace09ca1756c08be0105b2edd354a0e1c16c82898859", + "blk.29.attn_output.weight": "315b896f9f0cbacd0ca8937384c3a3a227efa908cb8c3a9125ec00c480e32b9b", + "blk.29.attn_q.weight": "d482d45386d4ad3394f08e9dff233ee3a70d0427d65c0b8fa05905da7e25ca53", + "blk.29.attn_v.weight": "cd3b5a6e2852da796902930a6a84bc87fc6a7c7bf51f8fc23758d12a39013b36", + "blk.29.ffn_down.weight": "5b3dba6f9753bd1b1ebcba65ef5373dd62c38e755c44b7231b95d93d45761f89", + "blk.29.ffn_gate.weight": "8610d9d2db15c256243ffcca3ffd31786d0ada0af0e7c7aa3fd20524370ab036", + "blk.29.ffn_norm.weight": "1a2ef2d38b7ac3e51190b9ccb8b6552ba83ab290e523356a7f851ddb35dedca2", + "blk.29.ffn_up.weight": "a5fdd15811bde16dc27677cf1a4c97daab4c28cb12a9530f1a0e573134fdb69c", + "blk.30.attn_k.weight": "1efeb0b5f4b45a85cdf47300f892ac77ac1f38000ec3653565d1303d1fb8c743", + "blk.30.attn_norm.weight": "c73934c182c7fe80838ec1d0b92f50a583f75f7a3d78d822f009b58ad2c80e65", + "blk.30.attn_output.weight": "3a0fd89de2d274614750345d827a9c886a4f97b343a13cdf680390505df596a3", + "blk.30.attn_q.weight": "711e113362bdb067db843c66236704eb1cd3fc5f40e3767143e96d510686ef4e", + "blk.30.attn_v.weight": "82b12a9a74fd3d91b73cc2e841e2b3f0a5197ccd2998afa17020995f880d2267", + "blk.30.ffn_down.weight": "af9f4b1287c0d824ae22d6e335d19e04a70135b835be7caa2435f1d85e931993", + "blk.30.ffn_gate.weight": "e2ab3e6f15f5c50fca66c084cb6a57a2b6b82406d65150e82ea0437b93dd9a46", + "blk.30.ffn_norm.weight": "c1b9c325c83f00e177386a4d7e769945f2995e60950c4a576c0a2c4ab9703d04", + "blk.30.ffn_up.weight": "9b94a21efd419715d82071b490d3b635cf1e8da080620dcc39e5bde976d7e9a6", + "blk.31.attn_k.weight": "0db0d82e3ddcc2c06209f5f013e1d72a84a996c40bf00186be485b909cc268e8", + "blk.31.attn_norm.weight": "2b8b7239471f57140c5cdfe06bd224a4f6326282f99736e44fba4c7b120ac101", + "blk.31.attn_output.weight": "a310b048840cc3ff2be4b84796340e8e2cdf05ec89d14bd3655c109b2bfa9fcd", + "blk.31.attn_q.weight": "f45e0cd95645175ea82813455356d171838539bc3f7676d877c698f2af0a0eda", + "blk.31.attn_v.weight": "8bde008e809112aa7e7c23e9c3099087bcc557313b01306c87efa0a4a30805ba", + "blk.31.ffn_down.weight": "8266fec7e203fbfad7033120861e44984581ff8b6851d01dfb7b81c5d8fa90ec", + "blk.31.ffn_gate.weight": "b73bc0aa5baf006d9ef6403104891b8133671b0992398fe038380b67e0d7e2cf", + "blk.31.ffn_norm.weight": "9c62cc27a7b6017c1df8ad49bff249a8245e8895c6754f402cd44623fda83268", + "blk.31.ffn_up.weight": "5b970a4694ea3171a0167f6e1636d9f00268bc1c9640430ffc35218494884adb", + "output.weight": "74fa0ef08c57a30e633e7117b1e9c805f833e2e5e21434bc79ddf9c92c6d7330", + "output_norm.weight": "59b8a59fd3fbf39353506116e43e5e76edd0cbf2a2873d869da4cf27a04997c3" +} diff --git a/convert/testdata/Mixtral-8x7B-Instruct-v0.1.json b/convert/testdata/Mixtral-8x7B-Instruct-v0.1.json new file mode 100644 index 0000000..a159653 --- /dev/null +++ b/convert/testdata/Mixtral-8x7B-Instruct-v0.1.json @@ -0,0 +1,348 @@ +{ + 
"general.architecture": "llama", + "general.file_type": "1", + "general.quantization_version": "2", + "llama.block_count": "32", + "llama.context_length": "32768", + "llama.embedding_length": "4096", + "llama.feed_forward_length": "14336", + "llama.rope.dimension_count": "128", + "llama.rope.freq_base": "1e+06", + "llama.attention.head_count": "32", + "llama.attention.head_count_kv": "8", + "llama.attention.layer_norm_rms_epsilon": "1e-05", + "llama.expert_count": "8", + "llama.expert_used_count": "2", + "tokenizer.ggml.model": "llama", + "tokenizer.ggml.add_bos_token": "true", + "tokenizer.ggml.add_eos_token": "false", + "tokenizer.ggml.bos_token_id": "1", + "tokenizer.ggml.eos_token_id": "2", + "tokenizer.ggml.unknown_token_id": "0", + "tokenizer.ggml.scores": "e3d3eea80bb41a1213f2d0aa3e8a38581d1f19323be77dbd779c9c7e3b72e676", + "tokenizer.ggml.token_type": "6040635e6bd38d98af06698feb75c1802bad35180ee6ae0a503e38c0f60fd71e", + "tokenizer.ggml.tokens": "604ac4bfbd019e430d7b6cdf18c6c0cd5b967900601f0307f714ec7773aa5ca6", + "token_embd.weight": "1d1d1d39a867d5a4bfb32792a47247d2638c10c95a6259391d02843583505cc4", + "blk.0.ffn_gate_exps.weight": "2e5cd43ac3f26c44f071926ff6c3f239ecc52a34bc9a5b5906d3d4c1bf2fbbfa", + "blk.0.ffn_down_exps.weight": "a4dfc7e7c96e7402eb70279601675b956bb7331da8101e63fe5c0a611b6972e5", + "blk.0.ffn_up_exps.weight": "2d5d87b378b2319c344ed2c642598b6f7cb6beeb582a8ea51abc9ae690d473c3", + "blk.0.ffn_gate_inp.weight": "a46aaf5aba7401ce6e41f158242b4879d34901661f3ede85496cbd0ce79d6314", + "blk.0.attn_norm.weight": "3fe37d913bdd2b65076bcdd6efe64a37b0b03cacbb1b80b9f7089068aa35f38c", + "blk.0.ffn_norm.weight": "5e14308a3c894734eb204c8f558bdc817e94bbd5b4e9cb4094e91ba388c8f7f2", + "blk.0.attn_k.weight": "73d943dcac0911e87bd771f4aa1c901e1bfe1aed293af06e1a67812159859f67", + "blk.0.attn_output.weight": "4c5f754c855e262e8d4c94c6fbbb57af06399dc0e170d7d99a1a17fc9aab9227", + "blk.0.attn_q.weight": "d6fd7403c873d49c05f6f03208f30d99ad34cb3b71c9990c47334d502a8e4c7b", + "blk.0.attn_v.weight": "cf17cf64b2d683bd9de6cebaf60e5c264df6fdc38fe719dde9d54c80334f6366", + "blk.1.ffn_gate_inp.weight": "0d524de81cd915816b4e714bf595ad6946a9130b3de731cd89428b2781230809", + "blk.1.attn_k.weight": "2ea47f412992b374c70674730fe84700e0c8cce177086ce9b6635e42408964bd", + "blk.1.attn_output.weight": "b4b2520794d54113e86c8ff678eacfc62e35be4395a594a6c8c22b4383ebcc0c", + "blk.1.attn_q.weight": "5db930c98c4f91f6eab57eb974c72210b158e366d23d6d2890b2759c053bee33", + "blk.1.attn_v.weight": "079bdde09668394bf7af9f8bc175017b4f48f0ab64e6dd855a4d7561d1693c0f", + "blk.1.ffn_gate_exps.weight": "146a62de19f9ab093deb101f9640534ffc3dc40d69f508be12fc0475d01b0c7a", + "blk.1.ffn_down_exps.weight": "949da94a3c0f375160672a979e85f7def284264b10d48d038238aad5f5ece793", + "blk.1.ffn_up_exps.weight": "7016a3f467d9e3f2f4b4019579ed86b757469cd367f2b225483305376b4bb3c1", + "blk.1.attn_norm.weight": "1614d1e6ed537737275eb888666c7bac533f4eefbe73dec92b591045ca9e1afd", + "blk.1.ffn_norm.weight": "405a455fa7d1ec36894652ceb554bbcb09a07fd6405f42741e66dc4a4665c19c", + "blk.2.ffn_gate_exps.weight": "90d5003fc7421f44220c0842d43128955e91488f6f785fe570b62d81b719e964", + "blk.2.ffn_down_exps.weight": "ecdc2b5a8b504ef0a7833acff47d69b0c1fa9c22126de1bb120ff5e48c3d6e2c", + "blk.2.ffn_up_exps.weight": "2cbd9485a32460d315eb50a2f3b00863fd77245bfe885b7565efac1cdb1f191e", + "blk.2.ffn_gate_inp.weight": "0d0a17a1a2c7a61f2cca49ecbb479154dc93a870873257bc4f225e7607f2e2c2", + "blk.2.attn_norm.weight": "b2e4c5a977f87a6f880896bd73596234c9b83622fa0d7add5892501e3155913c", + 
"blk.2.ffn_norm.weight": "0ab875b4280afa922376cfc7b9aa3f7071c9432ea1254091ce7de3749df0e8e6", + "blk.2.attn_k.weight": "bb884af51fb51550acfef54ccf1b58ce8284e587806e6a2f88c8265e1ad05a5e", + "blk.2.attn_output.weight": "0f03099ba1ef342ea61af9cd71d028123bbd8b1dd7d7fd9b509aef77815427d9", + "blk.2.attn_q.weight": "8fad0d29eb4c9d24e564774ee3316b9eb7a4c4985e4567111d2c836c830f6cf3", + "blk.2.attn_v.weight": "fe04c847ff677632401a94e7b6b6fdca60391ab21cb23bd791533115de6303a1", + "blk.3.ffn_gate_inp.weight": "29e3aaa724590c070e614af8288939603d2641b0ef11e8c0f476bebb2776673c", + "blk.3.attn_k.weight": "231cc5631def10f7f292d8862d6125ff555164cd70480ac76362149fad204497", + "blk.3.attn_output.weight": "86467a605c62852e05fda1a7ef43150df2cf715fe59785dbcba09f1c27cfa086", + "blk.3.attn_q.weight": "901822402453922225c2d6ac79616691d48217635d5ff7338daa971d5ddee210", + "blk.3.attn_v.weight": "27030784f44375720df2f090933645a31a022d3fb3b14573e5ca0b78f44070c1", + "blk.3.ffn_gate_exps.weight": "231ba59cc0b988d125d77bf627aa3f04636684870af88f081f3944b48a160d86", + "blk.3.ffn_down_exps.weight": "530c3ab44ae4d66e8afa4d10c153ba5dfcdfb7321989a988e62e9d12e7234625", + "blk.3.ffn_up_exps.weight": "b85c2d4d9d11332e702b3c0a6610d4f525f9a93e5d12f5c7c55c592c40755e75", + "blk.3.attn_norm.weight": "05dbb6d88cfa6b199f9d705ccbda97c0ef13f9ec875c595398a1a42d009a4555", + "blk.3.ffn_norm.weight": "6880b1c27d46969ce36fac049c05dc8b89e4bb47dc89df357e32df7e18fc512e", + "blk.4.ffn_gate_exps.weight": "a883b4f225b760c5a2f6605dc5e2167ab85bb398c70bf64ceb539fcbd6128dcd", + "blk.4.ffn_down_exps.weight": "d291bb656aae77947d4b525e2819bf4112afece53ff31de9dab999af1f65f9c4", + "blk.4.ffn_up_exps.weight": "38592afb8ba3dcfb26970f906174f7d3fa62da44fa4be4fc6912a19030ea9164", + "blk.4.ffn_gate_inp.weight": "1596cb74e8fd6c3080b937b06468bb397b0dbb661e6d180a6bcbdc43e8bfd0c6", + "blk.4.attn_norm.weight": "f90c83c5ff4366281d283384efc941620542b9cfdea160d678dc54a75e33f758", + "blk.4.ffn_norm.weight": "d28d8c49d1746b7cc085562d1074905fd14023844de823dc4fb22202bb280790", + "blk.4.attn_k.weight": "792bbf412cc357140fdaba543e547a9b2f7582919e307bbd9a80c7d6d8f5f1f9", + "blk.4.attn_output.weight": "d98e4a062d2631d9c315f1990d5f6ca9a88e7e0e46387f611ccb0353f876aa12", + "blk.4.attn_q.weight": "1a11a55a91d9f748a72176ff6b1c174844df406e00d1b66b9aa64dc6ee4bcd1d", + "blk.4.attn_v.weight": "04cb3c02b12a6313c7ac7044513441083d534fb4c5a3f63bbaa58f7edbd2fadb", + "blk.5.ffn_gate_inp.weight": "cbd5cdf015d33a2da6703eb74c22fcb97581fb9175435173b6dc4f9e8364320d", + "blk.5.attn_k.weight": "4fdf3405e4d657403f5647b51233521310ee984b4b81bbcd901cb3e6ab76b7ff", + "blk.5.attn_output.weight": "4a25662c46979a29600ed77e1907cf81fb16ef30e724c155444e54ccb76af481", + "blk.5.attn_q.weight": "e2acb30e30b97300039bb20ad0878f05159d5657fa811748a51d5b6fb35d631e", + "blk.5.attn_v.weight": "306504b6a26aa123c63dbbed3f4ced0ed2ee8fb6a30bf0093539b817539f5ece", + "blk.5.ffn_gate_exps.weight": "7e34df9b9944dbeea5e8565786d3aa6937314a4b87acd4d0874687877c5a39fd", + "blk.5.ffn_down_exps.weight": "c4b7a57a42b5ac0a8ae27dcd5cb2646d7a7cc7123126d44a56ab128e85f60b13", + "blk.5.ffn_up_exps.weight": "09d47593b6dd6c664a9155bff02fc2eb7ac4a70219a88162d05c802a01d3c6ba", + "blk.5.attn_norm.weight": "58804a036d6ac4c1fe357b8b6a97a5c37cae1c2f06ee0086c041d449c1c6ef6a", + "blk.5.ffn_norm.weight": "d872dee6789f0826211aa46ca9d0869e3e96bcace9e77d6559a7b6f3e524f3ca", + "blk.6.ffn_gate_inp.weight": "fb1eae732e974d6c1d020a5b4ef98c5f33016f984701bcea656f999a99daad66", + "blk.6.attn_k.weight": 
"55e9c59c5051ab5519b3a7962e1b5fa96a3c0251cb6200dc2f177885ad2de470", + "blk.6.attn_output.weight": "f3c834a8d0027370350e2b6294d95434d31432e57be6313b013c15a56303d61c", + "blk.6.attn_q.weight": "efaefe5f11c2140dc7cb532b0832c2a0b363a165cbda21f00fadae77efca377b", + "blk.6.attn_v.weight": "900bd734d75616d846a90a121c97e081c956a3d1ab012f66dd0bc62c43e1ec3c", + "blk.6.ffn_gate_exps.weight": "312a99661b1468fcaed2474621116f1681432755e973f3ee79d01912974fd424", + "blk.6.ffn_down_exps.weight": "ac9cd7db67a2ef0d2b5def86873673d05e48d49d147dd944469dbb8e2d4c46f6", + "blk.6.ffn_up_exps.weight": "57613e7e09579400a1a09fee4445acfbfe83f2f327fdf317877787d96ada6b84", + "blk.6.attn_norm.weight": "0e8801e09885c633bc01a9a5b85d4e878d30158a4eb41a937dc5b760ebd044cb", + "blk.6.ffn_norm.weight": "b8c58062ac93072f878446b0e7f958c737aa47fb769fc3a8f593133d12db2dd1", + "blk.7.ffn_gate_exps.weight": "1ef611732ff13edfa8d30981ed9dac00c15ceba9fc012ed0b199e9280a849948", + "blk.7.ffn_down_exps.weight": "856c6811945c7b0fa461ca17811cfa43436b4cdf5326bad23cbc30883486d7cc", + "blk.7.ffn_up_exps.weight": "6725e3e33994302ee13fa5ec163631ce2dcaa08aadde8fc166c2265d4561c5c5", + "blk.7.ffn_gate_inp.weight": "36b49d7f80c1003dc392b2c1b9960cd49889dd69e77b26b9e4b13d01f3d0a32a", + "blk.7.attn_norm.weight": "7a0ec49acc5e20ee71c6f80ca02f4f1e564c485e0ae0621309e7c2eb0c616cf0", + "blk.7.ffn_norm.weight": "eeae035c39ab6e64bc06a4baa1bf6e50d4c8b8797cb0ad8abd48be86974802c0", + "blk.7.attn_k.weight": "e8f78c1def01a7a38d2d9bf7becb17755e28fefe4927856f7890fbee52840187", + "blk.7.attn_output.weight": "5367f05ac3bb49ef8745ba5902e1bdd4442415a3ebff2c7e1a3918d7be6fe948", + "blk.7.attn_q.weight": "37c95fc5acc55a4f6e5f02cab9be60e4fe54c08b65f98f4455741b4aa542ff4e", + "blk.7.attn_v.weight": "c89f1343486ba55814233511e94090f7365662a8a4214aa4c278cdadc79196c2", + "blk.8.ffn_gate_inp.weight": "4e239afe8c7afb8de3a005757c887cf14b1622ca2d224227591cb0e5301f4c17", + "blk.8.attn_k.weight": "2ad0229f30fdcc1e85ce64e00d8f75902238294844a81d5af43e14ba75c02983", + "blk.8.attn_output.weight": "2e44a4722acb3b521b81d0b910f8ca2f6c286d874a92ddd02150566454061699", + "blk.8.attn_q.weight": "1cd2b09cb2f43e08de776b5f7eac197a5a6d4ffdfd52b21baa36319450147bd0", + "blk.8.attn_v.weight": "5a22c57ebfd33ac500cbcfd321d5b5b1783f8728801db6f3f8bed51c7183e4db", + "blk.8.ffn_gate_exps.weight": "91063fe56cb4f3ff3b41052bb5046fcf8ef61516a603ee90aab893a9d68c15a7", + "blk.8.ffn_down_exps.weight": "d4c3abc8f1d1b462f67f70bd8f404b3fcf45dceeaa8527fa120527254c383c90", + "blk.8.ffn_up_exps.weight": "76a1a1f08ec577716a2e7027b45293e9205751126424f1bebe1de89c78f087d5", + "blk.8.attn_norm.weight": "f980d774da39eb76c52358afac3e38cb4c81cb323deaabbe5c41822e3f17a98e", + "blk.8.ffn_norm.weight": "1c937658cf90f1a85db9a5f26e077730fdd4b694607dbeeb825c5fb2bc407e0b", + "blk.9.ffn_gate_exps.weight": "a2532471ecb7896d5c78e5a34e10cfaf4125265e1595166c8d0d0dfbe2a3187f", + "blk.9.ffn_down_exps.weight": "b47921a28412d48fee450b8b9d97cee42344a2e69f06d407fd9523d7adf13333", + "blk.9.ffn_up_exps.weight": "7c461bd1b2a73b439cff6a10d94afa01e8b06f7e6f09d9a6f28e3876aef48bce", + "blk.9.ffn_gate_inp.weight": "1648dfb08b5c06d7953a5a97ecb764995fae9487fb729a1c867023b2538149d0", + "blk.9.attn_norm.weight": "8635db0f299882a63b7cfcd1d4259c9e53fab22c31d3d054de36b1001380b31b", + "blk.9.ffn_norm.weight": "f9309aa323062d174c463613afef9b0a33501b510bfaa58a8e0e866d12ffef3c", + "blk.9.attn_k.weight": "dfe62030441e947a588512d18d9c6e4ed72c2f71c227d622c095e4263b23dadf", + "blk.9.attn_output.weight": "1977beb75c6349c50ba7dd3865d7c0a9c5c5ddc854413147b0eec98ac4fda351", + 
"blk.9.attn_q.weight": "eb132596719605cd6bd1782487f121994629e115190edd69240b12af66e734f5", + "blk.9.attn_v.weight": "9e708f15d332d7c5187b0693b1a977eb30a2fa10bf7df48ed9d7537c0aa6ed99", + "blk.10.ffn_gate_inp.weight": "97503a5d166c1925f9b65c0eed980753d411714d66896f3d0fad5286c7aba702", + "blk.10.attn_k.weight": "1ebdd222336bd25b48df1b138cdbe09021c4a5562ea7cb78cadd1255d2be3a39", + "blk.10.attn_output.weight": "5e98faa38e9d514b9057e1c8342c509cbe1083defd518e506f6bad89117d1f5a", + "blk.10.attn_q.weight": "3323a26c87d936d1dd87c577d0b763459fced726679612c874b3de5fc6d969c5", + "blk.10.attn_v.weight": "d5fa73cb56aca388e205f44455e4b4f676fdc12ed7fac4542fbb3b41ecea59ad", + "blk.10.ffn_gate_exps.weight": "225021b53782800906cd13b70be3a4161e8b300b97f984a959ccad6a6e8adcbd", + "blk.10.ffn_down_exps.weight": "f08eb91526bd22f5fd0402fe925d6141cdbb308a1ced0330858d0c85c71f5ef3", + "blk.10.ffn_up_exps.weight": "a9f688350c3b53eaada5103b5848bd9a3d7d6b327a70fa16c24bf28ece933eac", + "blk.10.attn_norm.weight": "5ba426c9dfc79805015ccd76cd1068b0ad3bb7a8453e14bb1d35486f122d8f95", + "blk.10.ffn_norm.weight": "98891d6acbc3986b2581b7a3af9f5946a392d9188972c6a8b15d4e745a4f2482", + "blk.11.ffn_gate_inp.weight": "b2365a60566e7dace892e1cb0e62eb73ce387352601723e847052b34874feaa6", + "blk.11.attn_k.weight": "0efbc1d1430505543ff71532a4fcda821aeac616ef6c1dca40e00d4f2ff70bea", + "blk.11.attn_output.weight": "3d5bd4d9a41236f30d4293edb9ae27beaa113ffb31b4fbfadff3a4c370dfd3e6", + "blk.11.attn_q.weight": "aa11e9db14dd9c77951511443077c2a1a78070753d7bd3d9811038473f69e325", + "blk.11.attn_v.weight": "5adc567f377aa11d1763d35f50e53fb2896a8b03b623ac36acc45efa2486d512", + "blk.11.ffn_gate_exps.weight": "71d07d982aabfab9eed3c733d49c20f023bf475368fc71db5084d91beadc4b47", + "blk.11.ffn_down_exps.weight": "9a06e61461e48b3925a9f7d9cca634d048c8b62163d7bc5c43e35899f959319e", + "blk.11.ffn_up_exps.weight": "bc05494d0dcec61021b3ac0c5bc1bf502736cadf48224e213bc139d562699a89", + "blk.11.attn_norm.weight": "a5758a10bdd0404ae1470e8e9db903985d4d07f60553c5001a5e7b660d4f7ada", + "blk.11.ffn_norm.weight": "814ae037563aad3771787316bec4806c95bf6f5991dd6474b4b1e5cc13dc18ee", + "blk.12.ffn_gate_exps.weight": "3a68b831ba1606fb9ef6dffed4732032447ecef23ea563ff4e79317586c7eb49", + "blk.12.ffn_down_exps.weight": "268b25e13f4b7beab08686e83705a41b21d15251809ee4784526f78a580da829", + "blk.12.ffn_up_exps.weight": "9105751a5b5b42ca2614d0456f24f779d2e2ac8cdff0f96842aa7ae2b70f341e", + "blk.12.ffn_gate_inp.weight": "d0de1558cc1d458c5c504f63ddc59785c323df7330474bb0644c346104b40a3a", + "blk.12.attn_norm.weight": "859a4c8113678e2e202d10299850e0cfb52eb11ea50bcbf4fe3ff39bdd394154", + "blk.12.ffn_norm.weight": "7fbf4c459c1760218877e9ee3f5ad49e960956a4369bcfe96c143f04ff9ddf97", + "blk.12.attn_k.weight": "0a7e254fdf3730a57372b6ff421a613eabaea68cdefd64800857941411318374", + "blk.12.attn_output.weight": "ceb763fc15d88af149d8fb78e82db2b7dab3aeae584af8cf7611a12356a397e5", + "blk.12.attn_q.weight": "a43402d23c46cb2d3cb3c2a98c81b19d10026b7e6742370fed6b2880b6e049b5", + "blk.12.attn_v.weight": "3bc24f2c0480ce91ef72993ee8f1cf962f7359e12183424583ffa1246bf3db52", + "blk.13.ffn_gate_inp.weight": "a6d68c82bfe66d8bab68f980f5f18268a9e2c0cd6b8832ed39010e0de198ae05", + "blk.13.attn_k.weight": "0166c39546b37dc2e01b2b396ba43e183f797dd04eaa51a6d103d8b58ee4bace", + "blk.13.attn_output.weight": "2ce5eb198deab9557475a58b69b11e9874b547e05c23f223c6e42fa35ddca069", + "blk.13.attn_q.weight": "745c1bbdf434284a7fae98f45e821c076dd9c2a2467dba6a9d8cf0041e419dbc", + "blk.13.attn_v.weight": 
"9ece68d5ac64d1421ea7aa32e1cff9cc1fecf5175f4c4da858dd31d8633e3337", + "blk.13.ffn_gate_exps.weight": "ccfdcb4670b131689de12d396a010b5ea737795cf5c15a14a304d720b3c7c899", + "blk.13.ffn_down_exps.weight": "8b8fb328664764f1aaa5cbdec336d5654e981e965a02ef622bde5f07ea1c164d", + "blk.13.ffn_up_exps.weight": "d2ace0236c2fb3365fdc85499d676a7f65813c48e5085348b1df1799922766ec", + "blk.13.attn_norm.weight": "1ed29d7d89ce52d7cb4d57e895ff7115430466e917136c049c385c030ed44e9c", + "blk.13.ffn_norm.weight": "a194fc542597a4dcfdfaec5e3cba2a2b2b21b21edfc87c39c0d7f7651355bc4d", + "blk.14.ffn_gate_exps.weight": "a625e3574e5e740e7f8e2f9c40390f2f382c720aab5b10534e298002dd8d1fb9", + "blk.14.ffn_down_exps.weight": "bc366f015b83c865946afd74c8a884943e0ea2c671314a0b7bb72f21a44d2f78", + "blk.14.ffn_up_exps.weight": "ee3199bf2086de77b49f57f487676be8ee70e102a2fb5a5ef8ddbbc28a9eff41", + "blk.14.ffn_gate_inp.weight": "2b437870c850fa2e2044d032bb02908af634356e37466fdae260b933e48ee8b4", + "blk.14.attn_norm.weight": "cd8344d193a1cbd42bd898e17f4bcb1ca0b2918420fbdafa9249a6f2b7f4ae06", + "blk.14.ffn_norm.weight": "70eec40374e558fed5b07257283cf36342b6b0129285a00007deb59c32c9f7c8", + "blk.14.attn_k.weight": "4053bdb507e0543d724b632570bac86b31707696d90a0db44c49b2a082e0d599", + "blk.14.attn_output.weight": "0182632cb0e06a07241b8293d25d109fbc1862e1e337d435f908e8681e2eb1ab", + "blk.14.attn_q.weight": "ffc7794a4c1b6f793c842dba969435330a7a80b9212e457b4b2ac33e68b41241", + "blk.14.attn_v.weight": "6411805292d528e61bbaad8f9aab9dd073529a17946c057fb06864fad9cf3211", + "blk.15.ffn_gate_inp.weight": "77d0744567c76e6abb67f81ba9c715b2b544841186d5b948309571eff213bafb", + "blk.15.attn_k.weight": "1f7957954ea4c6521c257b35a360e868ffa02bdb3de91f146d5e06bb4a545c98", + "blk.15.attn_output.weight": "d7809d36bd8d3342240c46fd87bcc7f9821a222f48d9a95e45ae50460265d3cf", + "blk.15.attn_q.weight": "25f509313ae4d8401b871904059f472a26f5714e7c791c725de77a1a522c976e", + "blk.15.attn_v.weight": "96fedf5a591fc0f020e6de10fd72ff12b3ef9cf70cd21dabaa0d3e7b06f54e73", + "blk.15.ffn_gate_exps.weight": "8f950d976b2fd9a3d213b84123cf114c1377efde9352767fb2ddee89e177c8ef", + "blk.15.ffn_down_exps.weight": "6fd09d1557bb94b06efbd4f6a1ca4be532a202ba290e9315bc8da3d12a5c4c4a", + "blk.15.ffn_up_exps.weight": "cbeb59ae7b0266a928dc7e3a6e70a9330b92f9ee1b17ee1ed91022108204a33c", + "blk.15.attn_norm.weight": "2005330911ac2edc7b6d27aca021c67d30d16eb632e49b1a13f30fdb2717aed0", + "blk.15.ffn_norm.weight": "0e9198f3b548eb78acc8961f2b3350d238d26cec110933ba753a8cf0035c501c", + "blk.16.ffn_gate_inp.weight": "a41d1f99d739c8b150c3945b6949763988d0c6a4c5a2b5855592ca1a48ed23d5", + "blk.16.attn_k.weight": "b624e2ec88c2d3047f60530fb87e72cb4a5e655a9663f6f3e9b09e5ad32cddaa", + "blk.16.attn_output.weight": "687759ea75e45108526ffc1573d6fdf084728079bfc2dc89b9979e76280f43c4", + "blk.16.attn_q.weight": "beff3a45c7e9ec82ffc6d3c701126be28654d10aabd747d03441210491fd31b6", + "blk.16.attn_v.weight": "43a349b13f0b9d040cacecd942bcb168c030fef8c75c987d59a4fce6c14e855b", + "blk.16.ffn_gate_exps.weight": "793406d6c13d727c82bb7b692ca98d65ca975baee69fc57be5378d77c5a19b62", + "blk.16.ffn_down_exps.weight": "9bad3dd150d0230404b7f886ac7ff8803225757e813f195cdb26bad245243b4d", + "blk.16.ffn_up_exps.weight": "7449d663023fea3496475bf0a9c1de7272ad0ce9adcb3265e8e424badaa674dc", + "blk.16.attn_norm.weight": "a424ce34c195a401df1ce37ac4f2794e8a6720b1ee8acb21428e2b68c65e0125", + "blk.16.ffn_norm.weight": "405a68bb8e16e1064df2de55ca3cd9ceddda1d9fc0af007a9bd7cad4b2676248", + "blk.17.ffn_gate_exps.weight": 
"97c6e5321491ca5dc039ee88da0eb0e78f347372785411809af84b3298cb19dd", + "blk.17.ffn_down_exps.weight": "1617ac19788a1be19bac69277408761e6bdf5719d63a8c7fea14d41cc27641b5", + "blk.17.ffn_up_exps.weight": "4ead1c365f112581c10610ea3f63d2a1474311d2503d2060fed4b458ef337f5d", + "blk.17.ffn_gate_inp.weight": "ed4b3393f2523f2b5e0fc7680a1caa2842e605728a529b5af68a7fa8d7abf940", + "blk.17.attn_norm.weight": "beac17ef86a7fb2b5840cc72f7a95a5e3d6bd24e7fa698e0b0ebb9bdac45c561", + "blk.17.ffn_norm.weight": "81cb58ec6d6dc02a0b4ede10adc336dc865fa76f982d4eab0e4a37b40f5b0fac", + "blk.17.attn_k.weight": "eab569e5ea8c8b05e5a6a209fba031129453c2e28181eee3e736b3b04b36bbec", + "blk.17.attn_output.weight": "f85b70f01438ce8fe5d10599b113f30bf18dee2bbae0657d3eba295870001db3", + "blk.17.attn_q.weight": "887ceebfbf6a2b94b43d2df4439ac3a5bbc29311d4b28addc04d525546032047", + "blk.17.attn_v.weight": "2df9414d65014c06a93da22ba3a668be7b83e2e8008e98d7771f7dfebed98298", + "blk.18.ffn_gate_inp.weight": "9b07741a0950fc667e5fd25937e33bc22e1f764f80eb4ff3119f005327ae0f6e", + "blk.18.attn_k.weight": "8649598dbb63938744c39bcda5ce8c31773e29c573be8d4d2c114f5030f8d3e8", + "blk.18.attn_output.weight": "f8e391adb92622298ca834d5d1eda48b69c3b1c51c5a584ef6c54a725c298d75", + "blk.18.attn_q.weight": "84bf8708a2eed618f48f69c178ed7dd11fa4c468102376e72e910ebd037d131f", + "blk.18.attn_v.weight": "31db3cd773f09548c2c1b1eac2718e46364a7810970fe9c433fad9d8de5397eb", + "blk.18.ffn_gate_exps.weight": "be2a2ba378002f1b61f86c273a69eede9b93786d5ce96b4fee1861f730dca4c4", + "blk.18.ffn_down_exps.weight": "d35196159e37705db50a5343e3989f7335477f1a4add67ef42ad64a638cd07ae", + "blk.18.ffn_up_exps.weight": "c6ceedd86e97913a6dcadc838e7abb762d629fb8dd55f15cf02fd9bd66d2ba78", + "blk.18.attn_norm.weight": "41f0b1ad83d6e3cb9fbe0d27878c2e7ad4a351b9f554a6bc9117c01745cdf6e5", + "blk.18.ffn_norm.weight": "96646204bd0d82f25dc77faba4dbd86b1332e449313e6684e00122da8be99057", + "blk.19.ffn_gate_exps.weight": "c6eb7f61e7938bda0492dbc05e51e8f631c99224fe18e99861fc4fc53ba9e9ff", + "blk.19.ffn_down_exps.weight": "4384803da3a3a3d44120d7dd192fe2c9bbd9a1a0cb492dbec1fdd7565230f1e8", + "blk.19.ffn_up_exps.weight": "22d73de2fbb8bb0f1bd2caf17fad8a355c47d914143f7f6e6d0128f66f074a60", + "blk.19.ffn_gate_inp.weight": "9a0cc4a2301a5634022fbce41189021bf0d1a961792d2d9330fd35556d18e5bd", + "blk.19.attn_norm.weight": "c5cc56ec5df9a1f7d5ad71fbda49f1433132e58895d45cb44c73420bd61ebd6b", + "blk.19.ffn_norm.weight": "77e17de741742ef2482fc7872fd423c8e3c1454dc4d2be89ee939084b6d78bc0", + "blk.19.attn_k.weight": "a92ea36ce2e3569656306aeefb835ccd5d1b03b33a86e0d3d030644cc923b813", + "blk.19.attn_output.weight": "5e2a912b37855f84ea964907a1a86d609cbdd79efa0c93c3e8e2fc07caf7c226", + "blk.19.attn_q.weight": "4ef3a5913292ac3c1a6fd3e9e53d011021f2b41d0276cf849706d1ca925cf7a7", + "blk.19.attn_v.weight": "42981b75b68ae852cee638b5433605c147da4392aaa6d7a06e756115b0171f39", + "blk.20.ffn_gate_inp.weight": "71381b9879a7c80b9f7b475abc0aa31b8cd71ccc00856ebe89764a2acb9df2dc", + "blk.20.attn_k.weight": "1928b7ebc054eb3967929ed6fb446314d5352f4aaf8b475ce55c6345019f2ea4", + "blk.20.attn_output.weight": "6071ecd9ca91af0d2ba93fef4a1a56f3b243dd70f862a21a2d164d56f386043b", + "blk.20.attn_q.weight": "002e95042a40f36ceed5829e3d0c8072e5f5e4ee86a089e2902b2348fed24dd5", + "blk.20.attn_v.weight": "42f509cdb1c0e298f89f896e349be86952c5168e49b3f83bb17badbcb7596d57", + "blk.20.ffn_gate_exps.weight": "a684a3ffe4b0a57c819a5fa9cb3521de223f392732927271e97ce925b6e33765", + "blk.20.ffn_down_exps.weight": 
"e3081a7bc7ba750d8a4886bc8ca4f231b55db4ca082b54b4106c7531964725cb", + "blk.20.ffn_up_exps.weight": "fad0fd5eca36ab154788da28be8ec25bb5d6db06c9d133db89e96df358a2f6a2", + "blk.20.attn_norm.weight": "c3e3f2429715ae95e884ef1246b0b461b23c5cc0ed08beecf70a14cddd184820", + "blk.20.ffn_norm.weight": "ff31f609dda65ca496b0584fabea6550e42edd05ebf229812aa6b7bb5ede15e6", + "blk.21.ffn_gate_exps.weight": "366f09ef0ecfb86808eb3296cc9abdb957951d27f6533c03f1422b54061da660", + "blk.21.ffn_down_exps.weight": "3fc495947d27fcca7fc0893c8a96e5d48ba27b2c8c58f8fcfb8dcfcd5539741c", + "blk.21.ffn_up_exps.weight": "6713ed51410bcc8283cbb001c4ad784098f25701e8021f4fa4f411e186859c4a", + "blk.21.ffn_gate_inp.weight": "6d4c92c01ec801647134d907bf1108878156df266a6107abc10526332b328b93", + "blk.21.attn_norm.weight": "27605719ae2df24f4f2e85a730927cab20367631612cb501631f6bbf38eb1209", + "blk.21.ffn_norm.weight": "ca80ee8177db185b15a4a378c1cb6f7143c76546a7f1726bda23f329323d4ffa", + "blk.21.attn_k.weight": "9e49f743d4a5bda9b4bd9c40c2ca37cdae5aec7e54cb193897ac8b4945ada14d", + "blk.21.attn_output.weight": "ab923540879753feaed152f5950f69cdd83d8f2413ca873f5f038b63ab0aea12", + "blk.21.attn_q.weight": "62617fc3f1c9d2aa672a4d91a121c7a91b92d145b65e75f0b06b4bb7c825dc36", + "blk.21.attn_v.weight": "15f8b2e72f8e8e992f2f6b3e93238a9d7be7bd6136f91c9d04b4b4cd0cd60369", + "blk.22.ffn_gate_inp.weight": "3ddb1773d9257b68add7a2a4e94dad25ed926803e02707863dd742ab9b2dc179", + "blk.22.attn_k.weight": "680e45a9e8d5feddee5266e119dc053bf80718fa9af1cf6803e6f493b265f1eb", + "blk.22.attn_output.weight": "0d5fae3402fb2c5aa3a860010e3973fc8e3168d1015f7a76b7b2964681693206", + "blk.22.attn_q.weight": "eee7e3d426ab533bd18d62c9aa142eedbde394bed07db58313e0fccc82a23237", + "blk.22.attn_v.weight": "26b5be1fe3c2b6824c5a648a3e4bdf17691904526fca158fbc3ebb627b67e2f4", + "blk.22.ffn_gate_exps.weight": "32ab7a7735313d60f6a75229b1aeee940b6aee176c9648536bf5921b0dc2929a", + "blk.22.ffn_down_exps.weight": "67590808f6a67777d3eb7976c31fe616d388b98fecbb12253b72d1241d70753f", + "blk.22.ffn_up_exps.weight": "fc245c0183e6d90829ff5e71a4ec93e4860b3d4c1a17b9dda2fb64f5f5c9ed32", + "blk.22.attn_norm.weight": "128e99d206d4d6724758ec97468af767fa0aea592149c324b731659c1e74a1a8", + "blk.22.ffn_norm.weight": "e45f498033f0cffa15da0eff2c47b4472e43fcf8921729fc4eeb2e3a6b3c78e2", + "blk.23.ffn_gate_inp.weight": "d63e686f5325fbc89fa242c2c52a3b8ff54f867dca914c9ae6eea13e9d6f46e5", + "blk.23.attn_k.weight": "f71f5a577f46ea12b1818f3a5ff4b85ddc45f9a2afb0fa2e041d71a3e31c6779", + "blk.23.attn_output.weight": "92b13563c1e0eac0d748fb67b235dfd7a64c8f16e2dafb316885744582e23b4b", + "blk.23.attn_q.weight": "2f9b9c35dc4f912f3f51c06e2d68f417b51a0de0a84aac530a64f9d3d7b0a2dd", + "blk.23.attn_v.weight": "268e40813806e74a5c364b19556d087bf8374e76e7b6fcf55c381eb7da13ccd1", + "blk.23.ffn_gate_exps.weight": "12f857e7a7ce228afac34d99b602c8d6fe96984f2a21118f459a58cb767ee65e", + "blk.23.ffn_down_exps.weight": "cdb082c16599c3bb36a28066dcc122d9529b54fa91b6cf0153437ec960a5e16d", + "blk.23.ffn_up_exps.weight": "f4b99f6f44d7b8b5a305894e88633bf5938fc1f6303a2b2092399da9c8b64d7c", + "blk.23.attn_norm.weight": "a691392210383915916b4d3886d5e4d56e7855e27e37e414fbd73bf66b3712e6", + "blk.23.ffn_norm.weight": "0c3dc72f667e5ae19b69bfa9f2bd2a01a57681f89ef9527bad4eb0d8c7b70da8", + "blk.24.ffn_gate_exps.weight": "86baca2a3157994df7fd8ced5e08436d5c1810dc29c0715637c36de723e0e7d1", + "blk.24.ffn_down_exps.weight": "ac5d559562b35c34993e34b071f66d15c65be5907797078c2d2a49aba54e3192", + "blk.24.ffn_up_exps.weight": 
"fce0a099cf09777f44fbab3606ceb75f7fae6f0b80725f9e871654b8cdf9262a", + "blk.24.ffn_gate_inp.weight": "e7c6800c0cfc56b565b2d35ad6f1dbfdb70dd0b05b338bc8da2286ffc3678d79", + "blk.24.attn_norm.weight": "dc6cc18ec52d102d015153c4a1132f9d7a504e29cbdec81c5edbf3b9e65815e1", + "blk.24.ffn_norm.weight": "480d5a1397af5e0e657f1e67d20ec0cdef5724e71246a326843321b87ffabd33", + "blk.24.attn_k.weight": "338c0597954a9b95a782545b2fe36469553e73f86ae2d2b5697767b28e1c7daa", + "blk.24.attn_output.weight": "a77d23b79933c67e52f1eef7f83a3dff4f767ce0bbcc39572f8cec4acd457643", + "blk.24.attn_q.weight": "45c9478593002be1998e96e70668aafa2dd3972380fbc1df12fb05c24ba959e0", + "blk.24.attn_v.weight": "515729420885408a6a9614bc27cda393ed907521318d14d21335d39a3eff0b61", + "blk.25.ffn_gate_inp.weight": "aae4ac40e9ab3925241f9d784b54b38851d9bc999a6c3bc03fc3f17c9b28a67c", + "blk.25.attn_k.weight": "4ab4808d02396c35b00b426f536015673b71c17ae6cd55bbc2e6bfe7a4c59d0c", + "blk.25.attn_output.weight": "1990bb982b77e0c947cd1a8ef0b36227ee1259e6dbbc2829e5c136edf88675eb", + "blk.25.attn_q.weight": "a1490f3048e8c0ec8784f8550c43adf5cc8d0f2f90131c934713fe4b1b015bd7", + "blk.25.attn_v.weight": "f15e53c6d45b3b6f58808fa968425d65e0b26b7f9b268127a77abb1227c67431", + "blk.25.ffn_gate_exps.weight": "656662447ff54f56ee80f78a1b9483f7efdc40f7375d0cd8a9c72ccf21f77e7b", + "blk.25.ffn_down_exps.weight": "db06f101bccbaef19cced0f6c185166e18202465f4a42cddfd535fbe5cbabb4a", + "blk.25.ffn_up_exps.weight": "584a7b02456f27fe1d8d3c7ccd21d426b6ea887795a3ed77f704596a1e3841d7", + "blk.25.attn_norm.weight": "8f0f3597982930fd237e9d609776c64f2b909a455b21678f83a7ebd4bbb83e64", + "blk.25.ffn_norm.weight": "3e7079c32582afba0c55e032f254adc18d2997705eec860185e9a6dd3d82f07e", + "blk.26.ffn_gate_exps.weight": "e70341691b583b86489812b29b77aa41eb658b1865733d6118da54c66e3bfcc6", + "blk.26.ffn_down_exps.weight": "5c1b812d11dfb064af816ced5ab6463bf9722eefdfc341b8a93705d5038fd781", + "blk.26.ffn_up_exps.weight": "e18118362ae54ef7432781c83884f9fb230a9d934e342aabeda8822ea5f71fb6", + "blk.26.ffn_gate_inp.weight": "cd1c5f6710166b9567c6b74c97b2348b191c60aa860958c6bc264ab095261dff", + "blk.26.attn_norm.weight": "71d087531af2520bda2e676c489e8529cef5db8aeea1eec0a937a8b4f2fa2e54", + "blk.26.ffn_norm.weight": "7f704e936fda28eb5c2cc339f0f6a5f78170b5aa43c01265b21668870d819c82", + "blk.26.attn_k.weight": "1cc62a0ce0ae251275d898c52c4a9fba5995fca10955d2011d10dd1a59e1afb8", + "blk.26.attn_output.weight": "636e881b1505f9cef656a4be98bec6a4765321d51f9bf1dac8933397cf44b765", + "blk.26.attn_q.weight": "89a3c4d202d7d6adebb9e0c1bcfd8b775f6456386f1be25e86e43acc949c1e16", + "blk.26.attn_v.weight": "ff2cc963b597cdf1a21703f3e7022af3bb4c65a34a19e19d9309a7c5e198b5bd", + "blk.27.ffn_gate_inp.weight": "6150139498fefe380bb99d11e72028da47a15ecb73dfc5b2774f726f4bed8f9e", + "blk.27.attn_k.weight": "f286eb9e5c56c7b801a497aedc40158c2a27877d7f9fb59b3fc67834798902d2", + "blk.27.attn_output.weight": "5dc3d3a05f9f7729509147fd09c16fb53f85f520cdab5cb69abf4bae3fd460c7", + "blk.27.attn_q.weight": "8462e40f86b24251960d6f35a9ea99b8793a01937faf1aec2859f2e5395dbb61", + "blk.27.attn_v.weight": "bac1a99e38e25953f8315f7212eb9777dc216cadb09b959977885ae62724ceca", + "blk.27.ffn_gate_exps.weight": "6a15eca7f0f6ecfd93db2e55c63875348ec4a78c4ff643ec46df9e958c0101e4", + "blk.27.ffn_down_exps.weight": "2e1c91247c4359e2073a8e5f26fd7f6426da7be3ed5bc65dcfff701f0a5022b2", + "blk.27.ffn_up_exps.weight": "65d6f5c553c9332085eae4aeadf25090b5d7768212ea7b08ed698102c21b29a1", + "blk.27.attn_norm.weight": 
"7fab8ae63ec8e91ce625cd130ab96d8427dad3a7413bb21b25ec5f408c5b9f5a", + "blk.27.ffn_norm.weight": "532720546b0fdcd423a02ca6e3e9d8aacb84b1b3e8269968f88a47fe2a69bab4", + "blk.28.ffn_gate_inp.weight": "a305ea58d98962d9dcf0c53ad2389b7acc8936fb35a0e3fc9410e7767cd49dea", + "blk.28.attn_k.weight": "8315e8a2e4f78dfdf36d4fc18fffc74bc95fe42c3ae4f9af2b6c874612c0f71b", + "blk.28.attn_output.weight": "9b5fdedd32d39ef46a22cca7cd5355d7b93bd07ea305f466a8aad6ca5a4f3778", + "blk.28.attn_q.weight": "4e8fb96997c30e231c437130f410d7c91d541a816f6c568b5f3bfdb4b8dece74", + "blk.28.attn_v.weight": "1fec739cf3bd7b4913f72ca358d4cf31391c304de44ac0ae31ecb825beaa7cfd", + "blk.28.ffn_gate_exps.weight": "9f259789d535e09268266b9a8020f32d6a6779966c909d91d3a10574f06238a2", + "blk.28.ffn_down_exps.weight": "516d3f8abaedb01b9916a4b67d4672159769138ef2850158bc1b32c41e31f0e8", + "blk.28.ffn_up_exps.weight": "f2f1d88d2c31ed588806fb5ad981d68f5134d7284c4fc022fd018de2eef437fc", + "blk.28.attn_norm.weight": "960fd005598deadaebd969996f4367a9dbfad90539a863674fe95730935acc64", + "blk.28.ffn_norm.weight": "e1993b37ced93d4049e9af2c47b0d9207d8f7e6f2cc3a52f57bef30bc806d805", + "blk.29.ffn_gate_exps.weight": "58927146338f443513337476b3cd30e6341742f096c2beb5890d400f10121298", + "blk.29.ffn_down_exps.weight": "03a3386e4f0b75a28c5608e23b2de8f0de25f21954e4aa7fc343431bde9db07e", + "blk.29.ffn_up_exps.weight": "6916b7490a7ae7b04a5d81cc1e7ac9b20c483434f3b186b12d87fe176bf1567b", + "blk.29.ffn_gate_inp.weight": "98e710e467a3d567abe4ce29d78b8e8dc033148762290c0c5e1ae4d78efd8c78", + "blk.29.attn_norm.weight": "4e64cb307d37be20d55f38c94faf7e451d11df5e60df347906cbaf9c5441be71", + "blk.29.ffn_norm.weight": "696c23a52f742679bd44440d687a4c44b4302d57f1e9dc5610d23374336187e7", + "blk.29.attn_k.weight": "e85253652fd6120c623634ba66b725bf7cd491318b54ccdad2c7df8851d64c0a", + "blk.29.attn_output.weight": "4f650a71efb150d1f24cd4d114d4187bf570ac424da3b92ea6455abdf1aea705", + "blk.29.attn_q.weight": "69fa7da901026ebcbbbc848455b425458b7e3295007d7fc093acf4b38e2166ea", + "blk.29.attn_v.weight": "17e2e7590b317b21f106de546aafd955579703d1e95d6aea044ee72ec3a514c9", + "blk.30.ffn_gate_inp.weight": "3a03284b4aa60d59d4a2ec86253469b61fc656372afca427cb77a5332fbcc62c", + "blk.30.attn_k.weight": "d518cfd0db9708e769eb1399e87ee49357dc54d5afdbac3d4c0ca46c64e789eb", + "blk.30.attn_output.weight": "9b44378714d784c5ef9ab604359091baca4e0ec222afa139b7f840eaefb371fd", + "blk.30.attn_q.weight": "cbb95365bbfbcad0c9cd99b4eebb5a5d32de68ce08e4063b5ec3e792b7548044", + "blk.30.attn_v.weight": "e7985c04fe1740e35a9598f43b67b0922b4fc2d00b68a92a9f917b82c3248de1", + "blk.30.ffn_gate_exps.weight": "8ac4bbd07935d98f895ba94dc174e5ad5046c3c222b53729d60f987c05e7eb70", + "blk.30.ffn_down_exps.weight": "dd672cc71e82abf05064a18121b8e55fe1a4f19bc1d7cb9a142f4add54bc336e", + "blk.30.ffn_up_exps.weight": "12282f664a2a12aa25e2deac58946108715ebb978bafed5274cef24569107646", + "blk.30.attn_norm.weight": "1a33458fee054c6c9c896a4bb0a4e1fbfa0293b2408c7dd2b81d692e966e7273", + "blk.30.ffn_norm.weight": "311e33b68051f507f1478ed8f2693fddb846170ddb7285a91be43f795c2ce31e", + "blk.31.ffn_gate_exps.weight": "8af43d9867a51cd8392fb48b981b0ceee0ae979c491c07d711b3b56b5162c786", + "blk.31.ffn_down_exps.weight": "5579cb7758c1600b19d1f540deffe081b575962e37437b3b2efb2fb0a2924e40", + "blk.31.ffn_up_exps.weight": "f2e7c005276b3a001fb40753f027fa10b4d5a346f43cf4b4bbdeec6e74e1cf6a", + "blk.31.ffn_gate_inp.weight": "89885dc0e30b6b16a90c0331d7fa3174671e941364e8102d934f02132237e61b", + "blk.31.attn_norm.weight": 
"99e4e9bf86a9edf8c404153a7e8a82324ba79da462622196e2faba161bd95172", + "blk.31.ffn_norm.weight": "55335997cf6de781bf332b943de96ff4646966b05d9fee86b76ea897e27b6ca7", + "blk.31.attn_k.weight": "cee570762b78da6316b637892cc4b080e40f57af5551ffb1866b9a8e80e96628", + "blk.31.attn_output.weight": "fa321ff55ec7819ead7b819fd45215262f39744569765ba2113c989c03588802", + "blk.31.attn_q.weight": "9e2c409b878f8a2a1436874abf428fceb1c534b21f9ad4dd6f532b8a469007f0", + "blk.31.attn_v.weight": "a845d0be68ba537b4a775bfba4d897faf7c82a811a2612b0b7420cc4f3574cb8", + "output.weight": "16101cbb74b54cda9ebc07ca3c762e3263a56efb3cc011156184b95807d7cf13", + "output_norm.weight": "d7aa61585baedd60157aafe157930785742c55989c288573566a971b02423564" +} diff --git a/convert/testdata/Phi-3-mini-128k-instruct.json b/convert/testdata/Phi-3-mini-128k-instruct.json new file mode 100644 index 0000000..19296f5 --- /dev/null +++ b/convert/testdata/Phi-3-mini-128k-instruct.json @@ -0,0 +1,225 @@ +{ + "general.architecture": "phi3", + "general.file_type": "1", + "general.quantization_version": "2", + "phi3.block_count": "32", + "phi3.context_length": "131072", + "phi3.embedding_length": "3072", + "phi3.feed_forward_length": "8192", + "phi3.rope.scaling.original_context_length": "4096", + "phi3.rope.dimension_count": "96", + "phi3.rope.freq_base": "10000", + "phi3.rope.scaling.attn_factor": "1.1902381", + "phi3.attention.head_count": "32", + "phi3.attention.head_count_kv": "32", + "phi3.attention.layer_norm_rms_epsilon": "1e-05", + "phi3.attention.sliding_window": "262144", + "tokenizer.ggml.model": "llama", + "tokenizer.ggml.pre": "default", + "tokenizer.ggml.add_bos_token": "false", + "tokenizer.ggml.add_eos_token": "false", + "tokenizer.ggml.bos_token_id": "1", + "tokenizer.ggml.eos_token_id": "32000", + "tokenizer.ggml.unknown_token_id": "0", + "tokenizer.ggml.padding_token_id": "32000", + "tokenizer.ggml.scores": "6e37bcde2adc7e350e87c496eddd7a2124329c1dc66c5bf3ad3997253e4f7a62", + "tokenizer.ggml.token_type": "b6ecf55ec64ee67d87750bdb8d757a2c58bf78377e9f4219f5689a6c4dea57ce", + "tokenizer.ggml.tokens": "d168da3ddd3eee820916945fcb9baf24dd3cde42f606cffa2d19e7c8a8743918", + "blk.0.attn_norm.weight": "216aeb2c9e0c271f899e1ef2a63cceeb8f41e97642e84fada54b1d3c1c11cf25", + "blk.0.attn_output.weight": "b597d56f7188ffc1fafc273fadc59d41738cffd677ae98c61a62c3285b3a3099", + "blk.0.attn_qkv.weight": "d28a6b44e13f59be5483e4be2bedb544e346168d720aca27f47d1a5a722be91e", + "blk.0.ffn_down.weight": "4a691370e5a61fcbbf540fbcbf4c0f1d15dec0364528c0e916d0744f6262b63b", + "blk.0.ffn_norm.weight": "0c00af2b4a3128bec64a0cbb1084b042fdbe13d9ad0d03bd577f9449dfead338", + "blk.0.ffn_up.weight": "b32b52f790c1c083bfb8a3126dc1111cfeeb28dc8c584a930a1e5334cb176bf4", + "blk.1.attn_norm.weight": "68748011503c6c029e8e69a84a8e5a89338f378769627b6dbf7f93d715c292e1", + "blk.1.attn_output.weight": "2267344add13b048ca59e4377c86dc512be8046a57156901fa32a20fa74e4ee0", + "blk.1.attn_qkv.weight": "9109d2e3d7a2eacfda5226587b8be124a3bf44b972da7ebb17aa15795897eacc", + "blk.1.ffn_down.weight": "d675df4df4dd039c0c339ad6445d39eddd2004db6bf35bed6314c7497245a633", + "blk.1.ffn_norm.weight": "3b5767ae977bc8baaa06b06efdbea193b6b3ba605ce76d77a76ce317e935500c", + "blk.1.ffn_up.weight": "80dfd6d9d234b00334c89b8e0a02f81899c2efd377321c34ba5ba51a5f61b5ff", + "blk.2.attn_norm.weight": "6a6743b057e5088f145bc179e92c9bfb41163e7295d7b81c62e23dd89d2b59c4", + "blk.2.attn_output.weight": "bc5491ea54e0db81462d7d9b7d25cbdda380c2db8de041bd1c4ab7b76a1d19c3", + "blk.2.attn_qkv.weight": 
"a61287a9852e2f5aca9c100b471d98398b2913a3497c743de3c70ec9ddd7087f", + "blk.2.ffn_down.weight": "4fddcc382c8dceeab027fe43d8d44e67edb5e8ce4b9a1b7f773c87770380ade1", + "blk.2.ffn_norm.weight": "07e05f82b3f63f711db3b684ca79aed25c0657917e66f88af47348a82065c227", + "blk.2.ffn_up.weight": "4835a682ef1826c12df01ae7663fc45f9c82bc8e64b665f13fb7da8e201ec0fb", + "blk.3.attn_norm.weight": "f22aba7c03999ba7136f39cda747a39715e498699dc1716cd97fc5dfc58d1b1c", + "blk.3.attn_output.weight": "53b579855366fd786c5126b2b30aac4d583ca7bda56833c4865f5cadb5c18c6d", + "blk.3.attn_qkv.weight": "bb56aba78158123140fcea59c69ac562ca208f6d3086819417cdad8c50f333ad", + "blk.3.ffn_down.weight": "97280897a7cd86db2830c004bccc5bc094f50e293baded0189159a2019145a6e", + "blk.3.ffn_norm.weight": "10a8c99f8b57a960e8e0a1133c4a26f9148403d1b9bff2eff114917de996f3b5", + "blk.3.ffn_up.weight": "7324046c915e75d621b2043597a245a428d8eea31869135e6257a861491d8dcc", + "blk.4.attn_norm.weight": "507d8e164de94646edbfe33def8e8fbf7c9a6ee3fbaedb5000f72d9f51ec5e36", + "blk.4.attn_output.weight": "bbb3429e6efa98c150e0fdbf48c16180cbf0d0cbc1b3c253c6c319d78f4593a2", + "blk.4.attn_qkv.weight": "b95ee5be0786d3901273d806c339fe6c20e6bfffd2a20672a9f56af80921e8ab", + "blk.4.ffn_down.weight": "806bbf91df92a5a22bd5aa1ffb7fc2869f7293ffc7704771c290ecc583b27975", + "blk.4.ffn_norm.weight": "cfc2930a81df7aee3a5e7f726a15c1182233e868bf0d9d37f6b6ae6d8c15c234", + "blk.4.ffn_up.weight": "c3390c69533de2c8424e8069323ccc5d0c4543111535da04cf2c7d26745576aa", + "blk.5.attn_norm.weight": "0d71c4fbcefabbd021569442853d2fe90668b19409ae2805a718a829ca60beab", + "blk.5.attn_output.weight": "10ebd93629112bf2df5c30dd0953a4a5e9020306768283181ed426934d47e14f", + "blk.5.attn_qkv.weight": "5cb05633369f12d4b00e0ff787736bd846856682115720ebc6cce05270c334f6", + "blk.5.ffn_down.weight": "e28bcc5094212eafc7476dbc5b7a520d25b79578cbf4229d698e2655956a80ad", + "blk.5.ffn_norm.weight": "b6f2c4cf9f34bb4d59989f96165c14a67dc1e266ad0a6d0fcc49f1add929e6ff", + "blk.5.ffn_up.weight": "0f9ef99423cc07ebedc0e9cfa95809f2d7108d910bb4ef97ebc0b0309c440750", + "blk.6.attn_norm.weight": "b3edcc47a42218234f7564d7470611b49401a41ae8cd42123f86557c69f5d7f2", + "blk.6.attn_output.weight": "eb9b7d257b388bb5b8fe0515e5c6873317239cb94cda236e4b6ada2a6c57c65c", + "blk.6.attn_qkv.weight": "eb968081f478c52f07bd9c2761741e982dba33cc4eeadeea3557d391b9ac2106", + "blk.6.ffn_down.weight": "1b8588bb7463206290322695577dcfced300895d6e6f4b26966c53a9ae2f0f84", + "blk.6.ffn_norm.weight": "1219c04b7770983c77814200eefe743f46d15328ea2b12711e44f8103eab08d3", + "blk.6.ffn_up.weight": "197ef287239fec47c55677f0fbb66eaf0644f775bc382de843971730721394f6", + "blk.7.attn_norm.weight": "b630ad08c80d564ed1c024384818e9fd3f22a36cd7a14aa96e7e2759a8285099", + "blk.7.attn_output.weight": "970255aa750828a47d6b9d399f9612b5bf25aefe7dadbcba41fc416d0d4067c1", + "blk.7.attn_qkv.weight": "ebb157c880293e6de8d629f263ba8853ed1dbdc02c311d43432bb8cfbb310739", + "blk.7.ffn_down.weight": "24bcd4db4cba844c89f878b81843c373dbbc0675e889d32c5b12e63384a7b670", + "blk.7.ffn_norm.weight": "b9c6f71001808ee873ce7db8056e4b53fb4cccec8b7f0f312899b575fae39d39", + "blk.7.ffn_up.weight": "979f1828d227455c26015a2a11afe9dd05f2bb97a8ba6b38c8dab3f50e627401", + "blk.8.attn_norm.weight": "4e8e347e3775010b7112ee630f2f4f2383be7ff64e6ca6154b9b22566552eaa6", + "blk.8.attn_output.weight": "65a44babf44a435a1829945211b3168f9ec78ac3cb7a049a733e93d11f0d6659", + "blk.8.attn_qkv.weight": "343ed07671da400b040812a4058482fa38284b5d9af9becfed07417fe26ce747", + "blk.8.ffn_down.weight": 
"7fb7e073e3c2c503c4e9d60efa0988fed7398d900cc003695fe3fffd3e188b82", + "blk.8.ffn_norm.weight": "b07c1f655d8593e3892a2cf73f8a0c19ce8e5cb613fafbe7cbd430da8ce4c57d", + "blk.8.ffn_up.weight": "8b26e14de54b3fdc2e2d3ea41720f9d9c236a93688c3b7fd7bf43f5fbb327c9b", + "blk.9.attn_norm.weight": "46394d408a8e316916177e6aa261de32e137a82d729c0b1800b072f0c38c39b6", + "blk.9.attn_output.weight": "d57f3d46107947a7073373a0b35d6ecf7759b5df15406f4a3590a60666af6b16", + "blk.9.attn_qkv.weight": "14bb8ace8c5453148f4b536e9f4279c813f31136716947256f5cca333448639c", + "blk.9.ffn_down.weight": "2b8d98e2b5ed68338f6e4de43bf7de0c4858cc69103cd5177725f7444eec7694", + "blk.9.ffn_norm.weight": "41a499dfd418cc4c6b8c12313f673f7e2cd4a3f9c4065eb6c4feb5eed02fb542", + "blk.9.ffn_up.weight": "143aab7533a64b17fbe201490a6f674bc7f0bd370c094500b2e100419073d1c2", + "blk.10.attn_norm.weight": "ebb670aafd36816a794347287269d8f1a5b19c1e3c0a1e38023bc19fdba9b073", + "blk.10.attn_output.weight": "b5d65bbc0ed5e49fdd9d754bc18163cd042a285024d0cf6f954c503bc8c877cb", + "blk.10.attn_qkv.weight": "f06b15bac88da798fa34a62b03eaac0dbe8b846020516603c387541f2d8dd672", + "blk.10.ffn_down.weight": "fb091fcd1b4de25d1bea94d1755e255cb02914a030d23e3a234e57b8d46bde6e", + "blk.10.ffn_norm.weight": "eb347bdf9c40414af87e13a8e72e40b31f004b50f7cb366f1a219ced60a61355", + "blk.10.ffn_up.weight": "ed2d52fc881a173f404fe8a1067862c9856d6c3e0d2e90a330a7aa394e3f84d1", + "blk.11.attn_norm.weight": "64e252603cf010a0e502ca39fdf8d0a196a79aec67c0d2bb9213fc0cb80c47d4", + "blk.11.attn_output.weight": "228e33e21c69f52efc74fdfc831bc9af271e44b2a29a3dced1d64e667ce36eb5", + "blk.11.attn_qkv.weight": "ab9ce6d4ef9e42ee0da3f20a7708a3bbc5e79e967b05fa86ba946a05e2eb63eb", + "blk.11.ffn_down.weight": "0ca133b7835c98dc77c25d64e4eb7873778bdb5e4d22d8b80f920f46865b43bd", + "blk.11.ffn_norm.weight": "02455741a0dfd161c79aa1ecc381901721f229fdcda5615622a629631fb61cfd", + "blk.11.ffn_up.weight": "9fecdcc099fbb8e23c6b1ea9294702a027f4a58d265543ec5e7be79b8f63b354", + "blk.12.attn_norm.weight": "783bb459911b1b3609a9b2bdfe272f1670add73b5471da738e07ac47e2e07dfd", + "blk.12.attn_output.weight": "1e1a914c9e48b857206ac5a1f7cead994bc1ea91d5d4fff8c834d73f2e38ef5d", + "blk.12.attn_qkv.weight": "5953e7185ccb87fb4dae8f9426ec86315d4c7794326e8ab59b3a95d4af2189f0", + "blk.12.ffn_down.weight": "a3eecf0f394f86e2cfb48a5940a5c50ca86d71883b2f79fcc642a935fabce0d4", + "blk.12.ffn_norm.weight": "0a4272e41373c23bd72f10d2d82930aa3a1480aac75832bfbf01cebf0b86b6a4", + "blk.12.ffn_up.weight": "06f42776de3a7ceac3025f26a7a8bd20e062233cce2bdaa2183470dc4b30b87d", + "blk.13.attn_norm.weight": "5915da60fb03e201fa649faba780e5fdf1c761c262b206e5415cf83181f65780", + "blk.13.attn_output.weight": "4dbf6eab074fa3835fd32bd631a8208e511037d5056d2fd3015735cca7674ef7", + "blk.13.attn_qkv.weight": "d3d8339a1c4782d9e73d77fdebe154d3c5b83ac40c9175b3e91a4977d08f876b", + "blk.13.ffn_down.weight": "de6772b46a55e1fd42b007637dfbf68b6598e5d5b61622da0935002e1e192d3a", + "blk.13.ffn_norm.weight": "5a640ea3b8c7be49c95a58a2327e10d8e8d9d142504bde5c8091613e5b961d7a", + "blk.13.ffn_up.weight": "f35e3545e4bd3531b2e843b5efd31dee0c13c807ee6386e65473ba67bbec30d0", + "blk.14.attn_norm.weight": "9b34986450b7c98b4927e81e61a816f9e84b1addc7c14926402100037aad6678", + "blk.14.attn_output.weight": "155d52efb23d366016d861a251d4d1f4a0c13699188c50d50dba016a0d8bfcd9", + "blk.14.attn_qkv.weight": "8e1415084e1f33c73a777f19e752489f4dd312cca047733e5ea643cd4a955e04", + "blk.14.ffn_down.weight": "a2a142226b94baa01ccb65bdea2b7418e49085c1d9c3c63e544e3112c58a25da", + 
"blk.14.ffn_norm.weight": "8aecfd9b0ae6affaea31a80c5c9a4a14b31deaa0db7bd8f6da2a64d23447921c", + "blk.14.ffn_up.weight": "0c1407237b8c1bd02f193346b5681926fe698a5055eac6a7450451b0f991707c", + "blk.15.attn_norm.weight": "e037bd19880bfa83d983200fb0c7866f8ad16c3ff5cc4b4f3a37ca7373870ff6", + "blk.15.attn_output.weight": "045fe4fc95cc129a1b92771b179c11b12845c4c088786c607f17bd98857e68e1", + "blk.15.attn_qkv.weight": "7621b7559705cab1d4dea1c69f76dbf9dc1c8837a203b656f484703b9c1b70ce", + "blk.15.ffn_down.weight": "7e5ac20e290bc60761e1cd972354fde225b7fa861048d44d9a0dd9b046d55f58", + "blk.15.ffn_norm.weight": "b6d830d88f1db1825687973c8c2b1a24c6fa84f07af8d0e3ef9c86009baca0b2", + "blk.15.ffn_up.weight": "dcda0957cd04fc45476774dba2bbf9aa89d6b05d5ca7b10ae6f73ad2c49b1cd3", + "blk.16.attn_norm.weight": "4ee9b70ba15cb2a08240f93990e90f5068c48fceb481f8e2186bec8b7214eb3f", + "blk.16.attn_output.weight": "315cfe5536658d2498192b2980eade15b2c9a4ff220e4011911457b1727fa103", + "blk.16.attn_qkv.weight": "3c8122e3ad637583b9dcde8ff3a323267d3014bb1f0f9771e5322260ca9ecc8d", + "blk.16.ffn_down.weight": "3b5fbebd5ee2b86cad96fb8a9b45a8770d08f82c1c8b74d7061e866f7020a18d", + "blk.16.ffn_norm.weight": "ffab69f20bda372de6e5878f0539163e2fc6ba113621ded95705fc3b1465c9f0", + "blk.16.ffn_up.weight": "0935ea3d258da42d6258406365f39f58ddaabfe97ea5977580db3635188f24a1", + "blk.17.attn_norm.weight": "f030441733f3d147b4a06a1eb4aeb8465c7c24d9c53bf4c48fe7e134d3629803", + "blk.17.attn_output.weight": "07a955ef09e8dc766ac0df647d0b2c69f23c4c69a7137654b4aad80303ed0eda", + "blk.17.attn_qkv.weight": "1c10688061e21e2fe12ad0cb54bf03895c1f83c3b0df743a42f548b52cbca1b2", + "blk.17.ffn_down.weight": "ebb9cc9836f41d88fdae2aa9a4355514e4edaec8d1577ffeb947a35204e77f52", + "blk.17.ffn_norm.weight": "50aff44f6528b13db5389f2ddcdb7676244947610bd7ffbff3f881c968c2a0d4", + "blk.17.ffn_up.weight": "d716537949582be33bde6b02e38f5a70081c9642a9fb05a61312126718b8d148", + "blk.18.attn_norm.weight": "0ea695c4e53d637902f46663a6ee42adc493c36794476acc7dbddaa05b13840d", + "blk.18.attn_output.weight": "5fd35b500221a612eb4f4bddf0e9b6b7db4d7733032a75f8802fb2d884647c2e", + "blk.18.attn_qkv.weight": "b0da37fd030fe69581f990bf23bfd35467a1bbe558af6de7c0924f6b72e92317", + "blk.18.ffn_down.weight": "b355c33f44b328f4bb977567de8f7544db4b005d7a8fbded658518ecf3c5a153", + "blk.18.ffn_norm.weight": "58b3fe9094079989a86e0387143259e1cc35952d24dc3df290c4ba6df44f5c51", + "blk.18.ffn_up.weight": "2ce530954c342c30ed2ead5353f931960bfae1d278868504c0efb973560fabbe", + "blk.19.attn_norm.weight": "533e9aed66feea8f0392aa81f9e293240e1f009a5334253915fb60c2749b615d", + "blk.19.attn_output.weight": "84f2d00f98a4113a779d3b5d1c3e7c914eb47784d3ab13b290367c124c2994aa", + "blk.19.attn_qkv.weight": "fbe6b9f53b07fa7537d3b3d452d20a9bc666f9fd41ec2091dd28bc2f70fc668f", + "blk.19.ffn_down.weight": "b30199e098c8bb3f890183d8b18471e80b62b604729b277ad62488dd71e1206b", + "blk.19.ffn_norm.weight": "c81373e41cd340b7badb19f9517c77c4250b4eb9a02dc758b8b49b652487d7ff", + "blk.19.ffn_up.weight": "5a5cb083ca7725720e3a890f7fa46354760e8007a8188849a092e305694a75e3", + "blk.20.attn_norm.weight": "4953091b4477e354357a8e743ba0a1900633e52f1599ee082a0c9b0b2b5cd978", + "blk.20.attn_output.weight": "62d54f7749cd6856097b2632066a322b0296df915fe66f382c5b5981be0d4f23", + "blk.20.attn_qkv.weight": "406de9e35b0729ebe902d7a47905cc7fb29a921431ed35dbef0c03e5690a1329", + "blk.20.ffn_down.weight": "62fb678b0d1261e19a4903a2b347d67afcc8acff01feb33a687a35a2d1e6f9a5", + "blk.20.ffn_norm.weight": 
"cd9d36b7e71e55c8925b97bb09c28219f182626bcff094878ae39c3db887a14b", + "blk.20.ffn_up.weight": "b9276771d79d3e932e73ccc520c3f8476342b9ef312ed2ee1e0da822e6e3ad18", + "blk.21.attn_norm.weight": "66d8c8a35e13ce9c2a0e75b670150e2c31484a55c2316df46075312196178ed3", + "blk.21.attn_output.weight": "12ab46c9382648f9b3350fdd92a6be6352743d62d6b520d7e2024e0c838588f5", + "blk.21.attn_qkv.weight": "a7909676ee1675ca23cd29a5fdd226df8dd9d68f94c6c9bbb51dd9fd38504008", + "blk.21.ffn_down.weight": "6fb317279c6542e82f97d5a12a60fac1bd0fa0405154f9fbe265e2fe39bd49cc", + "blk.21.ffn_norm.weight": "c0f703eb3ff161b5ba4490d87d8684b8a6c47a8f433e12f418333b9db439010a", + "blk.21.ffn_up.weight": "6dbdb80ef0c35e364bbce12d40d5e74c7963c7b55d58d9579567a07ffce7b863", + "blk.22.attn_norm.weight": "f94237433bf03d675cb2f655b81ca91a1ce2447bc6b00b13d6b0ccfe2d411eff", + "blk.22.attn_output.weight": "e821f95995ce497c01e63ca64f737713b1b65f11df1903e51d444aa516f33f71", + "blk.22.attn_qkv.weight": "1b0f717c73afb5eb4c82a1708c4e85c969e8a2a8770d9ddb78b1870a2d8a781e", + "blk.22.ffn_down.weight": "0f33f7a3cdc685484be99aa0c03642b0b20850a27d1fddbe054b13a9382f3ccb", + "blk.22.ffn_norm.weight": "9df285cf211ddd7df2b36a50489af574755c7d4d98b29a05cd04566ae613c8dc", + "blk.22.ffn_up.weight": "63ac300e1efb34041dd0136cf43ea622fac6f0caccce1cd9262f5e08d2cf179c", + "blk.23.attn_norm.weight": "5f72d9e88689b4027b28f5f8f26cd3abb03635ceea7ec98a4c91a9fc691f6707", + "blk.23.attn_output.weight": "6ecf04ff61125c5fc768f8656497152149373daf321ee9c957e8f7245a1184d1", + "blk.23.attn_qkv.weight": "a9d9978806724c2959f2cf386c233831f08e1e933dbf2b32665e788d9d512ea4", + "blk.23.ffn_down.weight": "72c7d17886a3da17fa0daa456aa5e877b2ef5b8b403182b870d9ca5ca9c70347", + "blk.23.ffn_norm.weight": "971e4b712e3025a13419b5b57d674b5e4ab7f18f74b57b9afc4671623da90c4b", + "blk.23.ffn_up.weight": "df2b5c7dbd5834545b815073af0c7355b065124e6d6f0fee78d8fa5b2076dc3e", + "blk.24.attn_norm.weight": "c41957c4a79ad3b16f6e11daec1c7f530b9f3f4b618e1e4367c3b67787ac4ab6", + "blk.24.attn_output.weight": "ef7d61f5fc88ac6f31bf60cb5f4d2d6b8df42d38825807112361a7224b0dee3b", + "blk.24.attn_qkv.weight": "3e6a58fe7d49c90bb6971efbad3371c32256881173ea5aee4b0c296cb206490f", + "blk.24.ffn_down.weight": "f43619144047de42fed81dfa495f1815d3cb771330e574043e2b67620819292c", + "blk.24.ffn_norm.weight": "5501d4a2a98c8ca6b42e77b53b221dbc08f530f6a067256d787534ec6fe028bd", + "blk.24.ffn_up.weight": "d64c8b0e509e2b1118f6000176f8956cacecdbb200c7e95ed93fb78b6e26c84a", + "blk.25.attn_norm.weight": "502fa3c302d371f61c5791f4615b73018ffb1daa09b6499b227116581244c5d4", + "blk.25.attn_output.weight": "ad8391d4e9c980856f2547aa945b2b6a407a6382158dc1ddd4f08d94ecc24be6", + "blk.25.attn_qkv.weight": "42e8983780d4a01a02c54ad23d4df21eea437f119a10af5a9c12a76a42d308c1", + "blk.25.ffn_down.weight": "302dd010d4e0ab4eeaee89090409ea0dddeeeed3236415eb8f97c942497eea91", + "blk.25.ffn_norm.weight": "fb34c1ee5bca96986c08834df0a0c047ba041c1123ac1f563e9d64312bf82d6a", + "blk.25.ffn_up.weight": "10739a8de156816d93c92b935386540bfa976bdbef204f0312960f6fc657582f", + "blk.26.attn_norm.weight": "7036c711609128c4e55968ff3681d3043338879a5737efd6c2ac9e1a2a61f1a0", + "blk.26.attn_output.weight": "db5db45dead5cb911fa01da59832f121b7c18b2d167bf53741c40819f24d346c", + "blk.26.attn_qkv.weight": "cae34c6b7f82ed14348d5ed30a79919c383737c1694a9cb9c0de609d3b0c1d0a", + "blk.26.ffn_down.weight": "491ec3a4da9b4f49f8ebc6be658ce397a9b801ae9fb35e82177e47808c65e5d0", + "blk.26.ffn_norm.weight": "fd7059d75d7f0e5288511ddeeb0f772eb3cae3ccfe4226b877015834edc3c386", + 
"blk.26.ffn_up.weight": "ea1ee1274c56458ce056d2205e5bb6e5422ce4cb0ad58006b8141749b97a0c39", + "blk.27.attn_norm.weight": "cc362c9a937609265052cd38544af17a1a7448cea086d4c801139e1fc865832d", + "blk.27.attn_output.weight": "ba757a81dabde9cb1b069d1bb616fe79649a1724f756567ec61caed1304fe6cf", + "blk.27.attn_qkv.weight": "1ab8d7d02d87756c12c2275636823aa5ede3d683178225c4cac4bd892c319bd4", + "blk.27.ffn_down.weight": "deb1c711c8a66acf4dcd2d088e1548f8e08f296f755e4067d6557fa55afde88c", + "blk.27.ffn_norm.weight": "fc6242d8cb8a4a37a8ddb7e41e7e60a63d4a89edf36acb35df052f10b9c91ece", + "blk.27.ffn_up.weight": "8df39b09c4801f343aca78f2918a1f6db78c8c55e591eda4c69eadb74c26e180", + "blk.28.attn_norm.weight": "75b539308f77e3cefdc6d98484d8b5cbf0538f0c2869a77b7373a145a18bc850", + "blk.28.attn_output.weight": "ae128940eb60a6d2e121762ef4b3e9dcf9eb3e105b249507fa7f12de0e19822c", + "blk.28.attn_qkv.weight": "bdda781c288e9326c240e33905f8e621b6a2ad902e620739d34f93fcd6f933de", + "blk.28.ffn_down.weight": "f1d6e6d1c286b1138bfd7e53fe477f399ae93bc2c04e35416f84218ed7247965", + "blk.28.ffn_norm.weight": "3f837ce82c8b9bde0d61d08b6f5fe5574886ea5328dbdc53f2929f18da8b4087", + "blk.28.ffn_up.weight": "2af027002e31d1b6cfedbdb30a2b9d7213f3aa691167c353913adfd48fda31e4", + "blk.29.attn_norm.weight": "61e8003b5329462ffe0fe172f2b160260de006aed858332d49d75504b6b6aa7a", + "blk.29.attn_output.weight": "ca44542a72a37476dc73dbdcc01f5b7497cb3ebc4ea230a55c9634ccd8e56ad4", + "blk.29.attn_qkv.weight": "abb3d9d6abe57872ae3daa51935d43264093ded5ce63b49d1e280ee5758be0e4", + "blk.29.ffn_down.weight": "6764b895fce881df097489c263446f0106de36217997660c15984b3ee22a5a06", + "blk.29.ffn_norm.weight": "89e03e9a33fc0e6e31ba9f0c2bd7c5734a118c5602bb90148793e08a80e8d0ae", + "blk.29.ffn_up.weight": "fa7ad57a84954f4121653152efed1a871d8adb20a1ea9086e3e849ce359d7d2e", + "blk.30.attn_norm.weight": "91a697aca1e42af54f806a20211031c3369e8d0bd58df1b0147fe24954e1f5a4", + "blk.30.attn_output.weight": "36063fcf766c89ac75be56f688cc63cefe5f2c733fbf4378ea9956ad386fa148", + "blk.30.attn_qkv.weight": "2cacd1161f1121a2c0b979930134f4666f73fb8d7237b3b0659ae091b15955a6", + "blk.30.ffn_down.weight": "9f3fcb6217100595850c05dc98f9ab2a263afdb6ab28df2fcb08aeff512057d7", + "blk.30.ffn_norm.weight": "6c600bc1fc7de39d4f8917b81fc7d1d5ed2a9b56492234c13a4bd6028c30d880", + "blk.30.ffn_up.weight": "73cabd1bb011956b2689ea3338bb76642ef3a57c197377d666d2ab5f56317668", + "blk.31.attn_norm.weight": "72d3e1cc771380645fa75a899858c95f39857a4f3f1ed60fe1578df383b8bc53", + "blk.31.attn_output.weight": "40089cdd29994dc19a1d89fa15902a89cfeca3540f12dc9bf4d00ef82506e456", + "blk.31.attn_qkv.weight": "1d0bb40e9258071ae14290a53c619a8e331dda07354d2a02ef45766c029ae5e4", + "blk.31.ffn_down.weight": "8defa0e06335b793fa8be03883f0a322d6c5b33f52c69c943c35c60d16e42c0a", + "blk.31.ffn_norm.weight": "33c55d9d0c496ccfb130361fe131649346e098abaaac39c0519507e5d846721d", + "blk.31.ffn_up.weight": "599f6503f61c692c1f82001973d35119f9688db5e6be9d9c298411491c93f09b", + "output.weight": "14b8dc662bfa3308ebb2e102c562d8e52c15670e538f20f3216a9c310ca9dd41", + "output_norm.weight": "7f2294ba94ce65681df6c7ddd8698799199b9d77dc83c10bdad5c3999f0fdb82", + "rope_factors_long.weight": "e34d378664e354652c38f47d10dafb0498ccc2fb042d39ff7fef768146fff22b", + "rope_factors_short.weight": "9379146a4988f373d362fe47b06c75e7fe7c54aa4dc9558758df79b7a87471fd", + "token_embd.weight": "19a03c1fb5ac0baee93b0a7d8b0f26e9a9b011e229b694afc50ebfc13d84f8bf" +} diff --git a/convert/testdata/Qwen2.5-0.5B-Instruct.json b/convert/testdata/Qwen2.5-0.5B-Instruct.json 
new file mode 100644 index 0000000..74f1956 --- /dev/null +++ b/convert/testdata/Qwen2.5-0.5B-Instruct.json @@ -0,0 +1,314 @@ +{ + "general.architecture": "qwen2", + "general.file_type": "1", + "general.parameter_count": "494032768", + "general.quantization_version": "2", + "output_norm.weight": "93a01a6db3419e85320a244bbf8ae81c43033b1d10c342bea3797ff2ce348390", + "qwen2.attention.head_count": "14", + "qwen2.attention.head_count_kv": "2", + "qwen2.attention.layer_norm_rms_epsilon": "1e-06", + "qwen2.block_count": "24", + "qwen2.context_length": "32768", + "qwen2.embedding_length": "896", + "qwen2.feed_forward_length": "4864", + "qwen2.rope.freq_base": "1e+06", + "token_embd.weight": "d74257dc547b48be5ae7b93f1c9af072c0c42dbbb85503078e25c59cd09e68d0", + "tokenizer.ggml.add_eos_token": "false", + "tokenizer.ggml.add_padding_token": "false", + "tokenizer.ggml.eos_token_id": "151645", + "tokenizer.ggml.merges": "6b1b1c58f1223d74f9095929d3e6416cdd74784440221a5507b87b8197f2bfd2", + "tokenizer.ggml.model": "gpt2", + "tokenizer.ggml.padding_token_id": "151643", + "tokenizer.ggml.pre": "qwen2", + "tokenizer.ggml.scores": "94e247e531e8b0fa3d248f3de09c9beae0c87da8106208a8edfaac0b8ec4b53d", + "tokenizer.ggml.token_type": "b178dbc9d1b2e08f84d02918e00fc2de2619a250e6c188c91a6605f701860055", + "tokenizer.ggml.tokens": "1d93f6679b23a1152b725f7f473792d54d53c1040c5250d3e46b42f81e0a1a34", + "blk.0.attn_k.bias": "5ce6617845f66c34515978d23d52e729c298d8bffa28c356a0428bef17142cf1", + "blk.0.attn_k.weight": "a960832a9e0e83e4d95402e5d1a01cc74300fcca0c381237162126330e1a7af8", + "blk.0.attn_norm.weight": "32c7d51cd0958f1f1771174192db341f9770516d7595a2f0fd18a4d78bd5aba3", + "blk.0.attn_output.weight": "c67e6e7e868354a11bf9121c70ee56c140b20eec611a8955e7dfe54a21d40a98", + "blk.0.attn_q.bias": "3e9e994eb1f03bccfc82f8bb3c324c920d42d547e07de5be83be12c428645063", + "blk.0.attn_q.weight": "dc12132f789b97cfa1e3f5775ceb835247fa67aa47400fd09c8f9f3769208583", + "blk.0.attn_v.bias": "a3fd0757b31fdc78af5ec320332d239c1a79d34e8804df06c5454e86955e8cc9", + "blk.0.attn_v.weight": "f43094a2134c7ee2dcc52aac3c8b7d9d64fb0295a8adb94cabfd49213f017b84", + "blk.0.ffn_down.weight": "18c2aec92db14f21976838a8c35d5575f80d0e4b1e05ccc0d8388d5877e80147", + "blk.0.ffn_gate.weight": "a3a1c4ef38f8f750eabadfe3d83bbb0f77941eec1cc1a388e51852e99c8691f6", + "blk.0.ffn_norm.weight": "b59b779c42d44b5c4cec41e39b4eb61e0491a07c1b3e946ccb5b8d5c657eda3f", + "blk.0.ffn_up.weight": "db64f09987ea59449e90abae5a2ffcc20efd9203f0eebec77a6aacb5809d6cff", + "blk.1.attn_k.bias": "a5c8c5671703ec0aa0143ff70a20ffdd67b5d5790ca1dfa5bba4e87e4071ed9f", + "blk.1.attn_k.weight": "835c7c7cc95b3cb2e55bd9cac585aa0760a033896621d3e06421f3378c540f7d", + "blk.1.attn_norm.weight": "f4c36fb6c14fce721fab0de78cc118d6f66e3a3d3ea0017bb14aade24c3c5434", + "blk.1.attn_output.weight": "cc1e80310c97cef068e48e40b7096f32fa2138519d6209c6a1a9994985999016", + "blk.1.attn_q.bias": "bc332780e66b0aac80ec5e63ac32344919a840db2fcc8f87bcef16a43a54138e", + "blk.1.attn_q.weight": "d766f06c925cce38d4b31b2165b3448e1fb49a7d561985f95d9cd2fcba52367a", + "blk.1.attn_v.bias": "9f486626fb6ed9ac84970a71e9b9818dd2758501fd3f61bb1c08540dcc7a8631", + "blk.1.attn_v.weight": "e873d1e5bd4f4d6abfd47c0f55119c2c111105838753ee273a03c5ccea25ce5c", + "blk.1.ffn_down.weight": "b3ce82b093f187344de04284b1783a452de1b72640914609b8f830dc81580521", + "blk.1.ffn_gate.weight": "5cd44ad237edaca525a28a3ac13975d1b565f576d6a8003237a341ae0d156f2e", + "blk.1.ffn_norm.weight": "4ac774ee8afaee119610c46aa1ff89fc6c9084a29d226075bc4aa4d2f15f746c", + 
"blk.1.ffn_up.weight": "042d81ab5f1983d85c81213232f3bfc05a9302d9dfaa98d931ebba326b6058b8", + "blk.10.attn_k.bias": "767ecfeacd60a2c2221ac4d76c357190849dd9cdf64ced418d9d0c7949101401", + "blk.10.attn_k.weight": "a9f3df343227537636be8202303453086375091944e498bad11e0b91e45e8c71", + "blk.10.attn_norm.weight": "01acd0e7b3e363f873dbfde6f0995ffcce83f5aaa10ff91c31dbf775035f6d5a", + "blk.10.attn_output.weight": "a531fe660769604ab869f01b203eb115e025cad4c0baeacdd1bcca99cf6d0264", + "blk.10.attn_q.bias": "356a02c9163dd660c1340fbe1e049b335ac6178891e00996131bba9ab4cb3e59", + "blk.10.attn_q.weight": "81be0cfb227339d83f954cd8dcf35828441211c6e1d184060e3eb76085041e2f", + "blk.10.attn_v.bias": "ed0450653284b62f8bf2c2db19c0ff7a6cf3cda1324d0a044c5e3db7bb692bd3", + "blk.10.attn_v.weight": "c1247ff7092babd2ed979883095b9aa022b2996cab1c77fb9e6176ddc1498d16", + "blk.10.ffn_down.weight": "fda7544965dc9af874f1062c22151c6cefc8ba08cbe15dc67aa89979e77b2de4", + "blk.10.ffn_gate.weight": "9f2632b1dee7304d10c70bd38d85bb1f148a628a8468f894f57975b8a2f1d945", + "blk.10.ffn_norm.weight": "94f8cbd6b17a4d5aabd93fa32930a687db3b11f086142f1cd71c535c11adcad4", + "blk.10.ffn_up.weight": "8dc2f8db0474939a277a3d89db34c3bcc3381cfea57bd05a8426a164634d9112", + "blk.11.attn_k.bias": "3b8e5a662b19411e3f6530714b766aad2ee41eebc8161bec9db0bc82d383a6e0", + "blk.11.attn_k.weight": "2c29f1ed1ce53ce9604e9ea3663c2c373157e909a0d6064a8920005f6d15dad9", + "blk.11.attn_norm.weight": "48f68a99c3da4ab4c9e492677b606d1b8e0e3de1fdbf6a977523f97b8c21ec31", + "blk.11.attn_output.weight": "5859f3838a94898b020c23040941ed88f4fcb132db400d0849f30a01f62c0f1c", + "blk.11.attn_q.bias": "c5ad89a5628f2bd81252ef44ef6bbcbff15c33ad16fba66435509b959c2af6d3", + "blk.11.attn_q.weight": "d102104e5d61c1e3219564f1d0149fd593db6c6daa9f3872460c84403323cfef", + "blk.11.attn_v.bias": "8653f7d48c5f75a5b55630819f99ecf01c932f12d33fd1a3ee634613e70edde8", + "blk.11.attn_v.weight": "e0a7c7d89b9f2d0d781ce85330022229126e130a8600a09d4a5f920f0bbd50b2", + "blk.11.ffn_down.weight": "4a22b3361eba8bbe1d9a6fda1812618e894c49f13bcacb505defa9badb6b96a6", + "blk.11.ffn_gate.weight": "484698b206760d3fd8df68b252a3c5bae65c8bf6392fb53a5261b021b6f39144", + "blk.11.ffn_norm.weight": "da69e96338cbe30882cf5a9544004387f5bbc0bcb6038e61ba2baabbd2623bac", + "blk.11.ffn_up.weight": "26ec74f1f504d1281715680dfbcc321db4e9900c53932fa40955daceb891b9aa", + "blk.12.attn_k.bias": "f94b49ec3e498f14f6bc3ebefe1f82018935bbe594df03253bfffae36bc20751", + "blk.12.attn_k.weight": "ae6323d0bbcfcea01f598d308993d1a7530317e78c1f64923e36d4b1649e9e73", + "blk.12.attn_norm.weight": "3784536a7611a839a42a29a5cc538c74ee4f9793092e5efe1b227b48f8c4d37f", + "blk.12.attn_output.weight": "46826c00b066829355db78293ab216e890f5eaaed3a70499ee68785189a6b0d9", + "blk.12.attn_q.bias": "b14db2d327ce0deec97beda7d3965a56c43e1e63dc9181840fb176b114cf643a", + "blk.12.attn_q.weight": "30f67df52ced06f76b6c85531657584276a454d6ec9bb7d0c7d2ca8f067f5551", + "blk.12.attn_v.bias": "57ab4b7e43f4fc5853bca7bfbb2702f8c2c391a49252a760abbb7b26330dc4aa", + "blk.12.attn_v.weight": "3ccd9da0cfe241cd33a63310f3ca6d81c5bc5a50d200bfea6612ac376166aca2", + "blk.12.ffn_down.weight": "a095774413198a83c549ce132d7c9684c0baef33145eaa889be370ef9c881c81", + "blk.12.ffn_gate.weight": "bb3b2bbdfb065d2a0a795909c53beec327781a4a7e974bf9f99c436cea459991", + "blk.12.ffn_norm.weight": "3b486c6cd97eb4b17967d9d6c0cc3821a1a6ad73d96b4d8fbf980101b32b8dab", + "blk.12.ffn_up.weight": "d020b82dd39a5d5a9d3881397bf53a567790a07f395284e6eb0f5fe0fef53de3", + "blk.13.attn_k.bias": 
"69381f8254586eba3623eceb18697fe79f9b4d8f2c30136acb10d5926e3ba1d0", + "blk.13.attn_k.weight": "c4d7a31495d71269f81b586203a50abea3a9e2985667faf258c9306ec6030f1d", + "blk.13.attn_norm.weight": "907da11075d16eda668dabe548af3cfd794df26b8ab53939af1344d91bec6fba", + "blk.13.attn_output.weight": "ca01cf6d2b8ece2fb3b0f56f1eb76194471ac27b54fe264f99c909f5eb7fef4a", + "blk.13.attn_q.bias": "2f5ecebafe03b1d485b93c41cff756ca57fb65b02e9d8336f14a3d26ab5d159a", + "blk.13.attn_q.weight": "f557f8acad7f0fa62da06b5da134182fe04a5bed8bdb269e316f970c9cc440fb", + "blk.13.attn_v.bias": "a492a88ae131e95714b092545a8752eaea7c7d2f9cb77852628ca8296c415525", + "blk.13.attn_v.weight": "d1220b1fe9f1cc0a5a88ee239d65fec900f5eaf6c448b6c2cbe74c81e15ed333", + "blk.13.ffn_down.weight": "53184e33440b49848a896304eb16a983efbc6b8bee0b93de8c8de716e1585fcb", + "blk.13.ffn_gate.weight": "684bf8896f148c851506c62717e45c426921b93c10d536ecdeb0fb28259a106d", + "blk.13.ffn_norm.weight": "6cb4e547ad8665eb7c174855c08afe1e5490fece66122522c1e9e8132d9064eb", + "blk.13.ffn_up.weight": "c64107897e38c06727075aba4ea7940b2cdd0e278b5c555dffb2790ef553bb57", + "blk.14.attn_k.bias": "2814ca9b160b16ae39557c9b629482fbe3a7592d372c1e1bf1ac59a2d578fde1", + "blk.14.attn_k.weight": "3377177396463afba667742972920ebb45dfdc37e9950e1f0e1d60a2f936b27d", + "blk.14.attn_norm.weight": "5cae870477d51dd35a6d22aaeacfce4dff218ffba693820ede6a4e11f02afd6d", + "blk.14.attn_output.weight": "3cfe9ccf3d48ae9e95b93a132a1c6240189a277d764f58590fb36fdbb714cad0", + "blk.14.attn_q.bias": "6a75acc2f090b2e67bfc26f7fca080ae8bd7c7aa090ec252e694be66b8b8f038", + "blk.14.attn_q.weight": "5ef45c86d7dda1df585aa1b827b89823adf679a6bb9c164bd0f97b2aa6eb96f1", + "blk.14.attn_v.bias": "5534480443e10ed72c31a917f3d104b0f49df5e6dbfa58d0eb5e7318120e3aee", + "blk.14.attn_v.weight": "58f45cf3240c4623626ec415c7d5441eaa8d2fb184f101aba973f222989422d1", + "blk.14.ffn_down.weight": "2dc82a0f20c05b77512458738130d8d05ce150cc078680ae7ee6dd7ed68d955d", + "blk.14.ffn_gate.weight": "d4a6c6f0fcccddfd1fcaa074846622f4a74cb22b9a654ab497abdc1d0dde9450", + "blk.14.ffn_norm.weight": "777e444932a0212ff3feac98442444e17bd8a98cb758ea3356697d0846d12c56", + "blk.14.ffn_up.weight": "6b75f6bd00195198447b69a417ed9d98f8ca28b3cb8be82f4bad908be0777d57", + "blk.15.attn_k.bias": "2d07211a58e6c2f23aa3a6dc03c80a7d135dfb28726b60b0e0fdd0f35ea5c37b", + "blk.15.attn_k.weight": "e77f3c0075a1810e70df956cc51fd08612f576cc09b6de8708dcae5daedb0739", + "blk.15.attn_norm.weight": "379a10d90609a5d5ba67d633803eda1424fc61ba5cca8d3bffe70c8b18b58ebf", + "blk.15.attn_output.weight": "402751c12ee9dbc9db5e3bf66a7b23ebe7d36c0500e0be67be4c8b1c4357fa62", + "blk.15.attn_q.bias": "acb37fc409ee725ceedf7a3a41b40106086abc47b76780728f781942c5120208", + "blk.15.attn_q.weight": "89cd3047a09b46ed2bb57c69dd687f67a1f0235149b30376fa31b525898e4a55", + "blk.15.attn_v.bias": "f081a37289cbe811978feb4da3ef543bdeb7355414d476f44e09b498da10cb2c", + "blk.15.attn_v.weight": "8404f242a11e6d512c9ead9b2f083cda031e9b269f8a0a83f57ee4c56934764e", + "blk.15.ffn_down.weight": "93438f43ee8cc4f1a7fd3840a6afdd5f02123e76db4f0d9474430c0100d148fc", + "blk.15.ffn_gate.weight": "ff935a2698843e87fad9dbf7125f53e460190ec71ee128b650b3fc027fe37bfc", + "blk.15.ffn_norm.weight": "4be80f199841cba831982e988451e1833c3c938a4d6ca1169319087bf0bd723e", + "blk.15.ffn_up.weight": "ee9ba63c66d71053e33551ddd519878bb30b88eeb03cfe047119c5c4000fb0a6", + "blk.16.attn_k.bias": "3f5fbabed4510c620b99d9d542739295fa6a262a7157f3a00a4889253f8341b8", + "blk.16.attn_k.weight": 
"8ca6eb139b281c257324cddea97a8e9aa7c048b53075cf00153123b967c27ee5", + "blk.16.attn_norm.weight": "290157f005e5aa7dddf4bd60100e7ee7b0baa7f11ec5c2cea5e0ead2aad3a4c6", + "blk.16.attn_output.weight": "b1f4d80a7447f08f1c331712527f750d00147f35c042442ade96fd029dadc5a1", + "blk.16.attn_q.bias": "e3e4e442ad4416791b468cad8de0d0d2d68c7e7df8d06002f4d49b4da9cb25e4", + "blk.16.attn_q.weight": "cc7392fa5bb1107d3816e7e7363de252d37efd4165d065e258806291ce0a147b", + "blk.16.attn_v.bias": "a7629830f2f6293e018916849614636d40b1bcd11245f75dbc34d38abae8f324", + "blk.16.attn_v.weight": "b6c7856c7d594437630929c8cf3b31d476e817875daf1095334ec08e40c5e355", + "blk.16.ffn_down.weight": "f9c0a777a00170990a4982d5a06717511bf9b0dd08aeaab64d9040d59bcbebba", + "blk.16.ffn_gate.weight": "ed88f11bc3176c9f22004e3559ccb9830a278b75edd05e11971d51c014bd5cd2", + "blk.16.ffn_norm.weight": "ab24abdcc4957895e434c6bb3a5237a71ff5044efb9f76c1a9e76e280c128410", + "blk.16.ffn_up.weight": "99f594dc8db37f554efa606e71d215fbc3907aa464a54038d6e40e9229a547ff", + "blk.17.attn_k.bias": "f236625676f9b2faa6781c7184d12d84c089c130d2a9350a6cf70210990f6bf1", + "blk.17.attn_k.weight": "c2a4f20cd3e98538308a13afe9cc5880bdd90d543449c6072dedd694b511ee1a", + "blk.17.attn_norm.weight": "5a9da4ee168311f487a79fc9d065a035432c6cafa8adb963a84954cf32f57a2a", + "blk.17.attn_output.weight": "d5df7031e354186ce65dc09d6f8a92eb721c0319816f8596b0c8a5d148ed0a2a", + "blk.17.attn_q.bias": "3212d5eeaa7ed7fac93cc99e16544de93c01bb681ae9391256ed4a8671fc6b00", + "blk.17.attn_q.weight": "d18cd9aa7ee10c551cb705549fa1ae974aea233f86471c9a19022dc29b63d0d5", + "blk.17.attn_v.bias": "a74ad11a1f8357742f80e2a0c0b3a2578fc8bbaf14c8223000767e07a5d79703", + "blk.17.attn_v.weight": "da18ac0e90884436a1cb0ad6a067f97a37f321b03c70b8b03bf481339fef5c80", + "blk.17.ffn_down.weight": "81a8a5d7a194fb53d976558e0347efbe9fdb1effffde9634c70162e1a20eff51", + "blk.17.ffn_gate.weight": "72870d83ab62f2dcd45f593924e291a45e4ae1b87f804b5b88aa34cfd76dd15e", + "blk.17.ffn_norm.weight": "cae39ac69b9bdaeefab7533796fdf11dbb7a4bdbdeed601e20f209503aafe008", + "blk.17.ffn_up.weight": "e7cb40b0842468507cec0e502bbed8a86428b51d439e3466bc12f44b2754e28f", + "blk.18.attn_k.bias": "8bfc02b94f9587aa125e2d8bbc2b15f0a5eb8f378d8b3e64a8150ae0a8ca3df2", + "blk.18.attn_k.weight": "434bc3b3332ea48afee890aa689eb458a75c50bc783492b0cbf64d42db40e8ad", + "blk.18.attn_norm.weight": "d6ffc09396c42a70d1f0e97d81113eee704d3bfc9eeae2bed022075a5dd08075", + "blk.18.attn_output.weight": "133f001f81f3b082468a7de67cb2e7a76508fce34bcc4dee7f0858e06eee082c", + "blk.18.attn_q.bias": "758d0e28bf5e660b3090aafb70e2a3191b4f3bb218d65e9139a086ceacaf599f", + "blk.18.attn_q.weight": "12d7b86fc1b09b9fa7f8b7ed43d8a410892cec8672d0c752f8346f6193343696", + "blk.18.attn_v.bias": "9efd15bab0519462431d6c6e8a5b7dd4e151dc449468097ee0ddca369c0ecc2e", + "blk.18.attn_v.weight": "f631231a79d4a2e9730fb2e386d8c18621eb3fb7900fbfdff5e6d52cc42db122", + "blk.18.ffn_down.weight": "874a2dddf456f3ab56b958b0860d71c8c680a6f89322c9bf6b2f32a113592300", + "blk.18.ffn_gate.weight": "4549ef8976c345a511df4a7133bdaf6fe387335f52dfd8a4605a8ae3f728c403", + "blk.18.ffn_norm.weight": "80c258a2536a860e19bfcbd9f29afa13214fbb4c34bde0d4da51287d354e9a59", + "blk.18.ffn_up.weight": "8b03308a581457a3c038b7a086f3cdf14941d7ad4107c4bd6d9d6b062fd00d73", + "blk.19.attn_k.bias": "e77f7b0c8e3e0a9b0d61918cd88371047752a1b02b1576936f4ec807d4d870ee", + "blk.19.attn_k.weight": "a2a318e93355230c0d0f95c441b080bf9c4914507255f363fb67a5e771d4d1e6", + "blk.19.attn_norm.weight": 
"9a4bdeb3970be21ac74a94c2c81eb36986533db81b78db6edec48d9802910d59", + "blk.19.attn_output.weight": "2369b103dd3947e2cef02b2669b405af5957fb3a7f9d0ff40646078c4b4317ad", + "blk.19.attn_q.bias": "e20bf427bef69059ae84a5d9f98f7d688489627f198fb6153def018ff9fd2e34", + "blk.19.attn_q.weight": "45a3bb3bdfd2f29dd76e5f78ddae73678b9a2a85dfaf609e460240ef5b7be2ad", + "blk.19.attn_v.bias": "a441f58a3e02ed86ee1819eefc9bd4e8b70d11b864a929d58a2c2ac0aeb8203d", + "blk.19.attn_v.weight": "30b0b04480c510450a7abb2ce9fa05c65b150a3cc4dc76f8916bf8d013f1b6be", + "blk.19.ffn_down.weight": "eebb9ab8fdb6a6efcfff8cf383adac9ec2d64aeeff703d16ed60d3621f86c395", + "blk.19.ffn_gate.weight": "3fef1493029298378886586478410b3d2e4e879f6aa83c07e210a7ce6481817f", + "blk.19.ffn_norm.weight": "e1be99ea1e8fb9678f7b8ba200f3f37e03878f3574d65d57bcd3a9fd796e2112", + "blk.19.ffn_up.weight": "f07cf25e09394fb69fe3ef324bdc0df9a4cecf3dc53070b8acc39e6d1689bf82", + "blk.2.attn_k.bias": "b29baa8221f125eff6b8ac1a950fa1d7cfc1bce7bdc636bf3df7d4065ab6466c", + "blk.2.attn_k.weight": "4bd0c179bced8bc37a09f5748c394e0cf50273942fb38a866e5cf50b6c96c437", + "blk.2.attn_norm.weight": "07b3edc6a6325c3428aa12f29bcae0be0de363ce61a6af487bc5c93fb8c468d9", + "blk.2.attn_output.weight": "056b5b31dbc81087c81b9d41c25960aa66c7190004c842ba343979644d7f4d88", + "blk.2.attn_q.bias": "479b6212401e097767c9d52b12a1adb8961c0fce9fcaaab81f202a9d85744376", + "blk.2.attn_q.weight": "f89196076f446a6dd8a9eee017f303504f9c03094c326449cee5a7fc0a97fade", + "blk.2.attn_v.bias": "ef9b1b986dbd9d7291027a88b67dc31434435b20e76e4f1e9d6273ebd31224f0", + "blk.2.attn_v.weight": "9322f4f00e85f8c0936845c51ca64b202a93df104f36886986a8452a8e4967a5", + "blk.2.ffn_down.weight": "7beac0d2440dc49af33ededb85a6cc3ba23ab33ad3ffa5760714b2ef84d94f6e", + "blk.2.ffn_gate.weight": "818a93864a5890c1f4dc66429004fad07645a50142350e9bff9a68fe24608a52", + "blk.2.ffn_norm.weight": "152c924d5514942ad274aafb8cc91b35c1db3627c3d973d92f60ff75f3daf9ba", + "blk.2.ffn_up.weight": "9c9579e600f209546db6015c9acfeda4f51b6d3cca6e8db4d20a04285fe61a37", + "blk.20.attn_k.bias": "fd22bfeffb63d818ce2ff1ea2ace0db5d940f7a9489b6bfc1ec4a5398848d7fe", + "blk.20.attn_k.weight": "f74439bc74c2f9252130c9c28384fd7352368b58bb7ce3f2444cf0288dfff861", + "blk.20.attn_norm.weight": "5c15d2613df87be6495fb7546b7dcedd2801d12fa5ecc02c877df889330e8f37", + "blk.20.attn_output.weight": "6731a39286a67f6859832f96695732e579e14e0c36956eccd1edce3db11595b8", + "blk.20.attn_q.bias": "04466e5a3f454a19b9b433fc2585396feac780027ece7ccb4e4bb3e406fc14d8", + "blk.20.attn_q.weight": "ead4c71daaeb17bf20d014a34c88b97f238456488e815ae0f281a5daf6fc99b8", + "blk.20.attn_v.bias": "adcc848e043025de9bd55ccb14dd8fb6343e8b5185ed07e12964be41d0faf99f", + "blk.20.attn_v.weight": "81bfc23f83526386a4761c2c16b6a93cd0bbf9d846c1a51b82c71f1474a465f1", + "blk.20.ffn_down.weight": "9bf660af3bafad919d03173c89a65fc9c89440a76c42c9e55e4d171076f3c17f", + "blk.20.ffn_gate.weight": "c04b4f3ccce44917ee228b998e2c19dd702aef10a43413afb152e808b5ac5c42", + "blk.20.ffn_norm.weight": "3d5b555d7746a71220143c6b8fff5ce4eb63283d9d9c772f1233d848f69f4ff4", + "blk.20.ffn_up.weight": "d7a196505c39e5469dfc7c6958bdbb54e93629ac1a047a6663ed96b318753094", + "blk.21.attn_k.bias": "4db1f48e5c6a3bc5720a5da813bbef08283e6269e12d83f8a9c54e52715d8011", + "blk.21.attn_k.weight": "c687b2f0e132a5e220a2a059b61aa2a537f37d8a674d7709f87880637b263b31", + "blk.21.attn_norm.weight": "ec23b0ff847a4b45585ab8e04f10fc20bb1637c5f1fbcdc4d73f336bcb5d1bd0", + "blk.21.attn_output.weight": 
"01255390576316c1731ef201e32c6e934eba356c28438cd06d9027ac6a3ff84f", + "blk.21.attn_q.bias": "3098f37205a15418e1681e407c82b7ce7c6fda6c6826b0590a13e1b68a38a1ea", + "blk.21.attn_q.weight": "30ea62cbb702a5359229dc96819df17ee535e2e9988d044b005c73ea536e1005", + "blk.21.attn_v.bias": "7bbedb2c22a04737f21993115701d4a06b985b7ca3b64681f53cd1be8d7ea39e", + "blk.21.attn_v.weight": "e11905e63579e36fbee978062af7599339ae29633765a4835628d79a795ec8df", + "blk.21.ffn_down.weight": "84def2ffd8aca766f9ce12ed9ac76919ab81eb34bdeae44fa4224417c38af527", + "blk.21.ffn_gate.weight": "4e99f05377b4a0b8d875045530a5c59dee6a46ac8a45597f6579f6fdfa800787", + "blk.21.ffn_norm.weight": "af48f13d03fba38ff8794a5f5005e666e501f971ca2e30bbded2777a8096f37d", + "blk.21.ffn_up.weight": "a29541c39a6acbc364be86994632a5bf55d701027cb7f23320f8c6d55ee42c91", + "blk.22.attn_k.bias": "c97f84db6c75422df6ef5768676d4e9abefaa3b8337aa2730ff260f8fc350480", + "blk.22.attn_k.weight": "af9a0c56f68779513e95be11611b7be6175ddae27d48bee9dd72fdbf05f6cbfa", + "blk.22.attn_norm.weight": "1c7518eb5bcff4a202c6f4a2827f14abd76f9bcc64ce75fe9db60b69437a5c9c", + "blk.22.attn_output.weight": "1abcf1f3caa2f59dd018646b93f9cf8fd30d49e98a473e6a8704419a751be46f", + "blk.22.attn_q.bias": "7221e01cb692faf2f7f8c2eb6e2fac38a1b751a9c9fdb6a21a0a936eb0bf4b96", + "blk.22.attn_q.weight": "faaf8fb7b6c19f343d47f3ea6b57151fb46c787e0b3bd2c292fd327d3d4d8e35", + "blk.22.attn_v.bias": "3ec05942e82d735de99dfd0d8228d8425e63e2fc584da98b3326bdef89ecb2e5", + "blk.22.attn_v.weight": "42e7b0ad06db76227837da9d4e74b2db97f3df4050ecb3a87cb9b55e08dfcb42", + "blk.22.ffn_down.weight": "87ef98ad2d0e824b0fa5ad8aa18787162922e527c9b1b721a99bc07d3bf97c82", + "blk.22.ffn_gate.weight": "562d6e5a1654b03aaa0e33864d23c10297fd4bcaa72d30fac69fb771ee1df9d6", + "blk.22.ffn_norm.weight": "f8a405dee467749d59427ce05cdd4b9c11bb18934a89258ea461f013b7d251f5", + "blk.22.ffn_up.weight": "90e1f4ae4062649d4d838399eb353e8bb8d56a49982b6a7f64aa3945377f7187", + "blk.23.attn_k.bias": "9ad22178a85f3be7e25f5aff462f31627466364f2f5e92f265cc91db0da9a8a8", + "blk.23.attn_k.weight": "d813beffb10f03278f5b58eea0f9d73cdcb7b5b4045ae025c379592e854f7dfd", + "blk.23.attn_norm.weight": "f583c9836044bdb056d6f8911088ac28add68e500043ae1f97b5d9158fe3d769", + "blk.23.attn_output.weight": "02789911ac3b97f6b761e958b7dd6dc7da61a46a1be92bd0b346039ca7ecd2b2", + "blk.23.attn_q.bias": "38c4970fb9b4f7e4a139258a45639d848653814b4bc89ea9849709b13f16414b", + "blk.23.attn_q.weight": "eb694be9a5ab5858b8dab064ee4cce247dc757424e65282989bd4d015b8580ce", + "blk.23.attn_v.bias": "0a25f6533aa7e7a152a4b198cf6c411c2408a34afa4f161bb4d5ffba2f74e33f", + "blk.23.attn_v.weight": "187e1bac6b70f74e6364de226565aa8275ee2854d09cbe5895451a689596049e", + "blk.23.ffn_down.weight": "88880dd9ba7ee80ade972927f810b5d2c30a69520c615190b27f9daabc0a8c5a", + "blk.23.ffn_gate.weight": "5abec63197935ab3eb8e6de0a5307396ec46cdb1cc5de25d87c845f3c4a3e887", + "blk.23.ffn_norm.weight": "60e1f5e6310c3a531c554a6bb7cd883aed58db1e51853f739436ea461c1843d7", + "blk.23.ffn_up.weight": "3d7f502771743f4a634188dfcd8b8a384fb07467ca8528366aee59ddb25b7bce", + "blk.3.attn_k.bias": "0b6b442ebbac29c8c4b67e8e3876d0382dd2dc52efdf4ab0ebbc6f71b6252393", + "blk.3.attn_k.weight": "480f40584fbda692c26f2cee45f5923780b236f8b4e8ec7bbee0237777a0918d", + "blk.3.attn_norm.weight": "39872be2af31bc9cd6b583ebba6fb759f621d586d66e5a2fc0b85991615a8923", + "blk.3.attn_output.weight": "924b2c80d8513bf637f8ebb3756a340d9cf2243de723fd08d7f5dccd46b3f8b6", + "blk.3.attn_q.bias": 
"863c9d848156847a3fe9bbc44415a4395245b5d13e95673c014fdb71e494ab0a", + "blk.3.attn_q.weight": "bff73ee5de92fba8f6c089bbb19ce57e17ab3c9c29295712804bb752711b882e", + "blk.3.attn_v.bias": "e1b6fea126e86189112fcdfee79ffc66a087461527bc9c2dc52dc80f3b7de95e", + "blk.3.attn_v.weight": "7812b7f5133636f06cdbb4dcc48ef7803206538641b6c960777b37f60a8e6752", + "blk.3.ffn_down.weight": "00b393d6a7e3ad9b5224211ccdbc54a96aae151f24ed631764ac224972a6bc82", + "blk.3.ffn_gate.weight": "cfd63fa3a038af05dc53c6eeb3c192f1602f26ff24cb840bcf1510fcb37b5513", + "blk.3.ffn_norm.weight": "7389fc240a282949580ea2f5b0d7973ac79f32f76dc0155b537bb6b751f8e27a", + "blk.3.ffn_up.weight": "2a945f47090df9cb16f92f1f06c520f156f8e232182eaaed09f257b8947a2a62", + "blk.4.attn_k.bias": "62533c31f0de498187593f238c6597503fef2a92e920cd540a96bc5311b3b2a0", + "blk.4.attn_k.weight": "93e829868bffd980a8e589b9c4566cd81e6ce4296a5f357a2ae93febe1284156", + "blk.4.attn_norm.weight": "9e0aaa4bbdd1389890f8abec20533f3ab16d61b872b1a8dbd623023921c660a9", + "blk.4.attn_output.weight": "74467d6f44357d67f452ac49da861468b38e98057017bd38bc9a449f9d3538e6", + "blk.4.attn_q.bias": "8e6d9026fd69b314c1773c5946be2e11daf806ef22a5d91d744344fd30c58c59", + "blk.4.attn_q.weight": "e5bfbafd94a4d530f3769f5edbba8cc08d9b5bee8f66ebf4cb54e69bc0b7f63b", + "blk.4.attn_v.bias": "20c570f92022d9905eb85c0e41d1fdb30db22007a9628b51f512f8268d6c34a2", + "blk.4.attn_v.weight": "9638d459d61da03c9dd34dad985e03c43b4f8a5bc9701a82153478329b0517e0", + "blk.4.ffn_down.weight": "9d91b06e89d52f4365dece7eaeec50f81e52cb2407b333248a81e6e2f84c05b8", + "blk.4.ffn_gate.weight": "bf6350a79c6a6ee9146edfd788b88d4a4c2b54db1aa0adcc1464dbba8a84b646", + "blk.4.ffn_norm.weight": "11a70a6b9f7ce336292f4e3a2c6c92d366d4ee4306ad4fdb1870fde107e9cc31", + "blk.4.ffn_up.weight": "64f23f493d02b147a72a59605e6b7dd1c4c74f6813a38a2a60818bd66f697347", + "blk.5.attn_k.bias": "f6c2c279c0ed686f298ad1e5514b5cd882199341f896abbb2c2129d4c64ce9c5", + "blk.5.attn_k.weight": "0e682f75870abf9efaca10dac5f04c580f42820ecf4e234d43af967019acb86f", + "blk.5.attn_norm.weight": "01efae7653705e741932fcd79dff3be643d7e97f4b5719b887835dffe44b3a82", + "blk.5.attn_output.weight": "69e841d00d196acc489cd70bc5ffbbb63530ac5fabb169d40c4fb3a32ebb8ed8", + "blk.5.attn_q.bias": "f3304d76ccd44fed887565857c8e513b1211d89a5d3e81782de507ab3f6fc045", + "blk.5.attn_q.weight": "98612a6b7920a247853ada95c240807d4ca8e43604279e7a2fc9bb265ae40469", + "blk.5.attn_v.bias": "39940a9b353ceed3edfd4a39b985c9520490aa1b9f11749c94fdf6d879d1a259", + "blk.5.attn_v.weight": "839f84b828cf83aecf479a0dc7bc86cce05145ef77dcf29916dc3e0680f5b665", + "blk.5.ffn_down.weight": "1f48cbb0960f15e06ab8a3754ade792995a655856389ddbca629c07e89d1b114", + "blk.5.ffn_gate.weight": "33d8219fce3189e1aab376039896eebd4ad36ebd26a8278cd19b26e4357e4f81", + "blk.5.ffn_norm.weight": "0f4a0f83d37127fa4483f2905cb4f38ef6ddc71584b6cb05632c62a9af313dda", + "blk.5.ffn_up.weight": "22a64a11e5f0a1ff45ca327bf9e1efa258f085ff6a96edc398b7474f725b4514", + "blk.6.attn_k.bias": "baa91df99d4df2d25e8d590bca4e334b97f2d9aa3df8e748fedc8a6188499111", + "blk.6.attn_k.weight": "121f3b9f4b9491996499392e2688a929cafe102a67920b4cb2a039349c43d8eb", + "blk.6.attn_norm.weight": "b4cf987e923d71f2f84c58d20ea8af7576b225bf61952145b489fdd395e3d411", + "blk.6.attn_output.weight": "a112642150a138d54b2a4038042fd33619035a35694771e966f3575856c635d6", + "blk.6.attn_q.bias": "a97ea10469cdfa3fdddf8bad6de683ef99f6170eb8d29d15dcf6bf4bce37c5a3", + "blk.6.attn_q.weight": "d80c787019317a87361de6bbc7df6701357216bdd9b404522cede34a719a5500", + 
"blk.6.attn_v.bias": "d846269db9cd77ae28da26ba0914cace1b6754bd5301af9c44607085dfcbd2d7", + "blk.6.attn_v.weight": "06567c433e8a391647633291b50828a076ad7c2436106bb9278c60a3f8fccb3b", + "blk.6.ffn_down.weight": "f15f66f56b3c474eac8c6315c5fff07c3e29c6e483d7efd4d303c7f43814be91", + "blk.6.ffn_gate.weight": "47768f89c6da8eefb29adb766ff4eb38c9dfd79320bbc1386248319fcbcf567f", + "blk.6.ffn_norm.weight": "7f8195e6b148212967145fc9d86ce36b699cff0de026042245c2d344f1ef8510", + "blk.6.ffn_up.weight": "53d7707ae4347aadb445289f9f87a008b72df5cb855b00080a605442fdd8edf3", + "blk.7.attn_k.bias": "63e274df3217dde25b8369a383e480fe4f6b403a74385f15ac0b5db71dce2744", + "blk.7.attn_k.weight": "f6fce88602f5945eee09767acbcad387d132614e6da39ae359f2bbf380d94b1f", + "blk.7.attn_norm.weight": "bbf5dc7336c0f9a511afef6bf5efeffd78f1b83940850c3eb7eb20c621b75656", + "blk.7.attn_output.weight": "d9fb907a138396a859cecbfcb377927308dc93c24c7fb52dba5eb59265feadec", + "blk.7.attn_q.bias": "f02ba1318346af77e309f40aee716e2de7ee8cab67e67b17636db9bf40894fb0", + "blk.7.attn_q.weight": "54a691e824be287a61c35c172edc01922ed792d2addeee029afc17ba6c7e11b9", + "blk.7.attn_v.bias": "3a4f182f51e84ce862d558fb2751b91802b65d74596bb14d624808513a8a83ec", + "blk.7.attn_v.weight": "a142fe6e106d3ab484e2dc6f9c72b8fc0a385279dde08deb1ad1fd05ac25deb1", + "blk.7.ffn_down.weight": "8daf7e8c430d183a4d6ab3eb575fafa4b5e31689f68b290c8b370411ad9d0f12", + "blk.7.ffn_gate.weight": "a2a786b45eb660994254b48e2aaf22f3e9821cfb383dee0ba04cc4350a2f8e72", + "blk.7.ffn_norm.weight": "73828bbc8c9610cc139fcf03e96272648cdc291263251fe3a67367408deb69e1", + "blk.7.ffn_up.weight": "e85dd0f63fed449ce16893c5795ea6a050a2d7a66d9534410a227e22c905dafa", + "blk.8.attn_k.bias": "91a752a6e2c364e5ee6a015770fe289aece4911ae6c6bbfe74ac52f465465f93", + "blk.8.attn_k.weight": "99c069e92c43a2efb74e23188256b3cabbbe06399878e681ce203a05d5da378a", + "blk.8.attn_norm.weight": "c76d36d3cc06aa2a9edb1abf9f602bb7ed61ac9d61f8ef7ed736a1e619abe717", + "blk.8.attn_output.weight": "ee5ff156a2625e1f203f65e69b514f9df04bd9a5e82b28e3876e16cf1c6f65c5", + "blk.8.attn_q.bias": "8fbd868a93b330c8b0418b488c5301f42a7eb0c58445a4e515d56777f1d96ed5", + "blk.8.attn_q.weight": "9f20ef86e80098ba52a3a31ebcc315bea3a614dac9cba7ac1db02f156db9b577", + "blk.8.attn_v.bias": "c4813571d5d618742183a7890c0b89cd7f18e210c758f63aad564659bc38a26d", + "blk.8.attn_v.weight": "ea88e1a4cf8bd56e9a88ada427d2b0cd352234827640757ee2a9ed594fb67a53", + "blk.8.ffn_down.weight": "b0d1a7495811580b189aaa3e20ea871d6d01ed7b6c23e59825078ef786944ff2", + "blk.8.ffn_gate.weight": "0a17c0caa0b06721c49b59b2a63a5dcbf744dd1cffa55962b404ba910c658a62", + "blk.8.ffn_norm.weight": "f15f109d4a8e9d1ff7c71fa5bc6373df7ee80c5f7d1de3fa0d4849d747e36bcb", + "blk.8.ffn_up.weight": "bbf4c5c4c5c8a0f9ae8b88e3cc8b86f81b98148722d5a350995af176c0b774f2", + "blk.9.attn_k.bias": "a7f60d962686b8ca60f69643e0e0fa8614688be738fb0b1c6bd54de35c2beb5e", + "blk.9.attn_k.weight": "dd80ce4adb00e338fc04b307e4c18a27071f4ba4397184a24d765e6e4a268ef4", + "blk.9.attn_norm.weight": "721e6487547e2b3986ab4b4e2500ceade59d908bccf4436e1e8031f246deb2bd", + "blk.9.attn_output.weight": "5a800af39107b363861e5f5173483cdcd644d8ac3b0c8a443b9c759d71285db8", + "blk.9.attn_q.bias": "0a19b4925ea8ca8067acc909b058adc327de3874cfc94cc9eb4a106d3f370123", + "blk.9.attn_q.weight": "93e84906684c0c7ede79967236d9fc8344da84a9f1daa04e8295c2c9b6b26a24", + "blk.9.attn_v.bias": "615421f812f821e230ecde4e6da35d868823248355ce7e4e51e2d650ead565f9", + "blk.9.attn_v.weight": "7f4913e289aefd9ceecbdaf9767b1e95303f5d59dd67ecb2cc15768477f4d08e", 
+ "blk.9.ffn_down.weight": "95d1b3933221e87dc4af70dd566daec9498bf358070b8d26f1fc70766a84a152", + "blk.9.ffn_gate.weight": "530f2d04f6a1fbffaaa5f2fbc3a328ebed7b330e3af14b4fc7d8a51b13ad8d42", + "blk.9.ffn_norm.weight": "28077de416217ea1df94b96017bef4cc562ab62e51b1a03a671c70abc29ce52a", + "blk.9.ffn_up.weight": "b87b6190778aaee4695938e24ac6c90dbbee6dce7c5c2ab5bc26ba4564581822" + } diff --git a/convert/testdata/all-MiniLM-L6-v2.json b/convert/testdata/all-MiniLM-L6-v2.json new file mode 100644 index 0000000..15c8f03 --- /dev/null +++ b/convert/testdata/all-MiniLM-L6-v2.json @@ -0,0 +1,124 @@ +{ + "general.architecture": "bert", + "general.file_type": "1", + "general.quantization_version": "2", + "bert.attention.causal": "false", + "bert.attention.head_count": "12", + "bert.attention.layer_norm_epsilon": "1e-12", + "bert.block_count": "6", + "bert.context_length": "512", + "bert.embedding_length": "384", + "bert.feed_forward_length": "1536", + "bert.pooling_type": "1", + "tokenizer.ggml.model": "bert", + "tokenizer.ggml.padding_token_id": "0", + "tokenizer.ggml.unknown_token_id": "100", + "tokenizer.ggml.cls_token_id": "101", + "tokenizer.ggml.seperator_token_id": "102", + "tokenizer.ggml.mask_token_id": "103", + "tokenizer.ggml.token_type_count": "2", + "tokenizer.ggml.scores": "6db964fe67338aca57790481a390121ff3dd643eebe49f7dd308029ad99abb6f", + "tokenizer.ggml.token_type": "98d247c5404b6b18f05f133b92dd56edf6efefefac326794b00d7b351f6c5aa1", + "tokenizer.ggml.tokens": "9efe405e229a45ff9916f54c475d151d2200cd2ab0006f347abfb069cf096c86", + "token_embd.weight": "8c1ee80a9ea4f65aa385ba30112010068af3d209bebc6e149d3d4589c2cd0a5a", + "position_embd.weight": "6c516f0b1c4e2388ab90394dd80ad69e4e4509b890982fc3408108ae66210eb6", + "token_types.weight": "f879f8e422ed211948f28b560d3c5e17aae7993f063b51196a28cf5c0fb3da21", + "token_embd_norm.weight": "75076e095d717aab96f8b6beeee503c27940d9a76f2b891a0e3de72f8a6043e4", + "token_embd_norm.bias": "298735285ffe944e1bf03e5d35c7280326b85cf121bde9874f1af5dc51ab939d", + "blk.0.attn_q.weight": "ab0923ce4c1549175112dcdfcc860fe30137f991e03ea6857fb5993670adaf6c", + "blk.0.attn_q.bias": "a3ec29551dabf976e1d34256b8ab5ab7b758f3ed9742c3cafdbd984d5441df62", + "blk.0.attn_k.weight": "4c1038a6d035c3e9ffed7fa672b614627814752503755fbad0cfb76a41ad71ba", + "blk.0.attn_k.bias": "e0363930eb588d91816aa3d230bb03b6e2551c165117b80b8d60397413819ef9", + "blk.0.attn_v.weight": "425e2e53e3f00ce98d29c3e6a161eb55d3e6ae0d96fdb9f6242d1c4fd6eef4b3", + "blk.0.attn_v.bias": "6579173a1e65ee124fbd0bd53cbdca4225515b4f2c5f18fb1bfd000f5978f9bb", + "blk.0.attn_output.weight": "a6d70a08cd7164de5d12af65d86d657c3db35aaecde778b2b3fda9193c4c9802", + "blk.0.attn_output.bias": "2b8d12c4f9a9c5bfaa29c597839568f6e0525cb41eeaf64ddeb6bd84dfeb9701", + "blk.0.attn_output_norm.weight": "bbe6e502a473228b525aeed26cc31b7db123ad63bdc5a6eebac6ea70b8b51d62", + "blk.0.attn_output_norm.bias": "36eaacaf0007c5c62daea97aab0115390c0682914f78482e37eb76885f4b7a50", + "blk.0.ffn_up.weight": "24654561c76ce387d125759ba843f06b904ef721fcceaeff6ccc62180a48e874", + "blk.0.ffn_up.bias": "fd3f0126aa1d95768fa60eb6f4ab8a2763cfcb7e5405f35b92353031d86f4d34", + "blk.0.ffn_down.weight": "97a829763a6a5bf3329ceb4d39c424ba4787d61653a5b0bbd1f84782e4d4e0ca", + "blk.0.ffn_down.bias": "7aa980c30ae8b4ee7f69df28808dbf5c431f56ccc4a80340f644a0419f16c054", + "blk.0.layer_output_norm.weight": "ef30dad4c2a083ae1ff5039a2a6cda60ecc89bf1e486a6f8c0d15f50589603f8", + "blk.0.layer_output_norm.bias": "8b1b77e67568b1bce43fc476de1b177c53ff688d66beb66995e8eb3dc290da8a", + 
"blk.1.attn_q.weight": "284331622a1f6f9b87ccee4f652bd66a394ca493c4d93be4d1844e4f6159ad10", + "blk.1.attn_q.bias": "e24ebd4860330e08f6bfdd077a82db0bee33f4c8846cf1db26327a34754c7069", + "blk.1.attn_k.weight": "729dd0d555544b5bd0f7580b3c8b384256b974605f0e7487b95f295aa032997d", + "blk.1.attn_k.bias": "2aa51a828a858f35473f54477583fea54ce2ccc34ea60fbd1d228fbe9bca827f", + "blk.1.attn_v.weight": "6be304671cc311d5ca5c103f2b51467ee800c589bc5b8101e09ff5aed1f68c21", + "blk.1.attn_v.bias": "43bcbab78a8819e07f723bc9e5b737b71e87a7594f15234e882b63e327a64199", + "blk.1.attn_output.weight": "15ec8a1a12b26c9976445308a09f748ab0e4bef0f583d13ab08c3129f8738d73", + "blk.1.attn_output.bias": "dac2146f4baa6ed16f6c0dc7443831fb7ec79bedcceafd80d1a4b628a1bb072d", + "blk.1.attn_output_norm.weight": "d2151eb33bffac536787a4c9a5d2b31c7a80b17c4611877842a3cce2cd6e98d8", + "blk.1.attn_output_norm.bias": "31e1b779716dafb855d2cf5631ee168a0ccf372eb9c6ea6091f66fa97a9b9d2d", + "blk.1.ffn_up.weight": "a57547fc3fc3b77406f5cdcb0c87af9bc184701f175c39c1f35297826fce3cc7", + "blk.1.ffn_up.bias": "123be6d541d086202913c75d878c54d59a749f3af7b58f7ef9eb9e7c62a24c9a", + "blk.1.ffn_down.weight": "cfdb79788377e5cbded8790cd41b9e66c397ecab75474071fcd7cf32d30f9613", + "blk.1.ffn_down.bias": "bcb58315519a573097960891c9ae41cf4c685ab78c3e0e77471471758a7eae88", + "blk.1.layer_output_norm.weight": "819b554271452bfb1d84c2603b90377b2e41a0ac1e3aa8b417ccf9dce63375bd", + "blk.1.layer_output_norm.bias": "47a3433ac27f5ce8947fb38dd491f3706df4ef6adb0ddf74612bf0f54b19e164", + "blk.2.attn_q.weight": "1557a9ea852b1880551f7290e00aded4f35e6c4180fdcbed1b0039bf805f639e", + "blk.2.attn_q.bias": "c3bfe5f3066f655fd36b055530997b59ff33ef013563aaeb3cb8ff07dabd59a9", + "blk.2.attn_k.weight": "cfd08eb69c61ae2f9f14f9b7ff5c5394ca264b1a9f3d48156677f90dd1766289", + "blk.2.attn_k.bias": "9b839bc0e79974a0b3f5d1895972bc6f5c9a1bc16052e1af786e6a530758152d", + "blk.2.attn_v.weight": "02b26b1208480eaeeb00e7b4cf8b690006ca14759357fc44ed4a2a8924ead993", + "blk.2.attn_v.bias": "e7e6f0089fded1659a867ab736c220d9653ea7da6b1b94baf5c8d30a748b63ab", + "blk.2.attn_output.weight": "a1db121c7d33806b349cadd050300a57db49fdc91224fd07c9ac43bf4299dc79", + "blk.2.attn_output.bias": "7675128b6a92555cd955c820311e91e9417d31f48848f45d047b4100c62148b3", + "blk.2.attn_output_norm.weight": "5b4595e0fbcba67a700c4331adf746d2fba3546364a4db5607ae241947bb1a21", + "blk.2.attn_output_norm.bias": "7b8e16826ea30e5a2ba0b02e0095a901775981a296e98819625320e983060d08", + "blk.2.ffn_up.weight": "a0d815d946ac07a65095c4ae4df77b818845e6d97795c7d82f55e689d944db59", + "blk.2.ffn_up.bias": "ce37c0a4174d6bf773ded7bd016ede627ad3bdb8bc99b9992a18dc8e8898f252", + "blk.2.ffn_down.weight": "f6231d2a25426fbd45b9f1160aa484220eb227ceef0348c4a6a6de890606e5ef", + "blk.2.ffn_down.bias": "429e00556e8dc63a785238b309b9d83738500c1ef6d736fe6526ad88ea496d27", + "blk.2.layer_output_norm.weight": "651457a573adf3f7dd9ee5dfe1c8e89389e94443993aab77ec6a0b05aa621e35", + "blk.2.layer_output_norm.bias": "41fbbeda7fd89b0cef5f945ae44011c316982390401d6f75ba8c6d365e185247", + "blk.3.attn_q.weight": "95a43f32949d2cb8d22815bb27a44abfc6665ba96221af817dfe058cb6ca72c6", + "blk.3.attn_q.bias": "f4e34385e75d8108b6b3bd336106e2133a8c9be0cc343dfe5dc48c32a823c7cb", + "blk.3.attn_k.weight": "6b892da6a17d4d3265265a15f695864a31813ee8c8e710ae9bc9e1adbc6c9a18", + "blk.3.attn_k.bias": "40b8067b641a56014cee42548240aa8930820958b1933004892b5f04fbaef39e", + "blk.3.attn_v.weight": "9fcd5922319dd2a461082a5ce040c1dfe65d87d70ca6547dd0b46eeecc3eeb2b", + "blk.3.attn_v.bias": 
"b528c56212e66931fdbe267ac327a9c2f87cd03baff3ea719e30afe681da15f1", + "blk.3.attn_output.weight": "e3b178c1b03981e75510e0d277af23ea59cc404b5394e61bd32291825719b502", + "blk.3.attn_output.bias": "712c84d39a6a5a9c06a09da8fd9939ba0d5525524a4bba61ea4de09b48f45cae", + "blk.3.attn_output_norm.weight": "d1ffac88e675592ff72f8a617be32b4a381d443b2f8f2645dbe44a1e5745aac0", + "blk.3.attn_output_norm.bias": "ea31a1c73146234c50e0e43f485c458413714867b8e2703af66482f7db2d6c40", + "blk.3.ffn_up.weight": "4ef4f3b9a1ea6ab2ef2eb6e8b008e06a44790d099d97482a05a51e39a29afac0", + "blk.3.ffn_up.bias": "06a4296dda16f452675c51f108079fe7722552d6521c737d97734943818b9a2b", + "blk.3.ffn_down.weight": "f114b2bebe392c7d80433bb880c6730293aa4561b0b0370dcdaf7472daebd847", + "blk.3.ffn_down.bias": "2c8e67831d28a3bf613fc7912ae3259b63d72abcaf4d30efd8800758400158de", + "blk.3.layer_output_norm.weight": "a1dfeb7b5a51dd56447312ca41e2ad2f361a3ea12ddc355127f5f4219fb0a482", + "blk.3.layer_output_norm.bias": "1ed630021b25c6c6fc93fd32988b9907df966d4982a93081f639aac3044618ab", + "blk.4.attn_q.weight": "b5fae4c1f9a5f33a2a2e816ac0c01c25f422e4efdd59ef1ed93da2610e5370fc", + "blk.4.attn_q.bias": "c2e376524ea98ac3b10d9eee19ecb1b1e261fa5149efe0232844c923dfb428fb", + "blk.4.attn_k.weight": "a4632f5ebf9321d9d08f9112a4e5dda2efe5671df4a4e67fee24845f5b14af16", + "blk.4.attn_k.bias": "a9a02ffb8b8b4f6dfe487a7e0341f1d5318c9d2b793a688f34cb1b22fc66ef60", + "blk.4.attn_v.weight": "10ad8deb81d9fa093b1e5c0f24ea82aa7df43e6aca49e260fcbea56eab8cc86a", + "blk.4.attn_v.bias": "7326813e181e021130bd33ac136293fcffccce2d1d8cb59041e5b13a8cceacf6", + "blk.4.attn_output.weight": "c92573088c7437c2b3cda51490e152c27fb19e5468df591eabba5a49d5398d44", + "blk.4.attn_output.bias": "14e10b419e5859af1eb685af5c330aee67048cd704dcead9217840c6f5393222", + "blk.4.attn_output_norm.weight": "02b6831c0e0fb0edbc579a92812a1dd972cb15d14fcd382d4427c5a7b300ac44", + "blk.4.attn_output_norm.bias": "7eed5cd503bb6bb6ceb1bc8b07cc077903a4f14fb8b9d6cdf39644815ecf1374", + "blk.4.ffn_up.weight": "8d0c91d62e74d6431321116a37cf3339e630bd50ba164d3304fc4fe8dd831223", + "blk.4.ffn_up.bias": "d325f07f73c005a273c484c7be8e7abb4d6e8a5c4fd093f5869133b97629d017", + "blk.4.ffn_down.weight": "7ba7bd81143f40537b84f938e403e19f30e4928625eb371de052b9025beb4d21", + "blk.4.ffn_down.bias": "2853d9c2a75288214a4bf4907dc19d04d01926f4913d302b1aa7bdbfcce0f7a1", + "blk.4.layer_output_norm.weight": "a4ed1885fa77b90fed5300c355ef0aa0c876a8c747151d9d790939d464d57d4f", + "blk.4.layer_output_norm.bias": "62142a81e813a9e636333b2b805d6bc3b17c5e7cd4b15adce1ada6bc9a32563c", + "blk.5.attn_q.weight": "afc1dff080a72c3daad01384b1448d476aaf789871017c8ff8e144788887995d", + "blk.5.attn_q.bias": "748a820371c1d4f872c84545b36358d239c35bf6c99e2812c237d88c3292763b", + "blk.5.attn_k.weight": "59e30c1ed8acd2cbb01de5f62e7804015b9ecf98ba157d98cab016344639eda5", + "blk.5.attn_k.bias": "f839520078f9e589496e982e86d0126c7aa14196047339abffcf49a696229f77", + "blk.5.attn_v.weight": "3e21fb874e21b90308e1f46af034a3c32d3eba1628d62ae5f2246d6af5818923", + "blk.5.attn_v.bias": "5cd4852bf95c1444d10d756750f6bf49f842c0b39e9953c7f408bb67c325ac8c", + "blk.5.attn_output.weight": "636ce6a7752895f204b9d01ba0aedd9a294f908b42f372c22a16d9dd590d7471", + "blk.5.attn_output.bias": "82d924d4b0d2b94f2bbff91619216d6967a3541ce9b1531a6a60457a67b5d219", + "blk.5.attn_output_norm.weight": "5e7bd0a8d3396080f3360d7c4700bf094a06216431bd014c4479eef72ecf4271", + "blk.5.attn_output_norm.bias": "66c6de5edda5466d029c6753780be81ccd4218bf8bc00680000e0f06856ab712", + "blk.5.ffn_up.weight": 
"5bbf6e7ea380e216e33f8bee06d25f2265359d3876a300e92bc6e41d48e33430", + "blk.5.ffn_up.bias": "9d795388bb36fb33ad3a37fea3ccb4937838e02800a608fb47d363cd06b47370", + "blk.5.ffn_down.weight": "2fd628974e7f075479dd227b46fbd48ae8d3ca34d735b36f391ac06410730368", + "blk.5.ffn_down.bias": "cd213ba9eaa75fa541648097fbe9c96e58077e6c3ad6ad2fb1f21f8350f44291", + "blk.5.layer_output_norm.weight": "159a9df41d15b7022d136f86a2a2631c4635f9816e957472217077b522bcf52a", + "blk.5.layer_output_norm.bias": "24c1f27ffd1eb4e5be7e3a2909943e6f0980635d761fa1efdd0c19645da23766" +} diff --git a/convert/testdata/c4ai-command-r-v01.json b/convert/testdata/c4ai-command-r-v01.json new file mode 100644 index 0000000..935ec35 --- /dev/null +++ b/convert/testdata/c4ai-command-r-v01.json @@ -0,0 +1,344 @@ +{ + "general.architecture": "command-r", + "general.name": "command-r", + "command-r.attention.head_count": "64", + "command-r.attention.head_count_kv": "64", + "command-r.attention.layer_norm_epsilon": "1e-05", + "command-r.block_count": "40", + "command-r.context_length": "131072", + "command-r.embedding_length": "8192", + "command-r.feed_forward_length": "22528", + "command-r.logit_scale": "0.0625", + "command-r.rope.freq_base": "8e+06", + "command-r.rope.scaling.type": "none", + "tokenizer.ggml.add_bos_token": "true", + "tokenizer.ggml.add_eos_token": "false", + "tokenizer.ggml.bos_token_id": "5", + "tokenizer.ggml.eos_token_id": "255001", + "tokenizer.ggml.merges": "902a060cac8884a5793d2a857dd2e53a259de46c8d08c4deb243c239671e1350", + "tokenizer.ggml.model": "gpt2", + "tokenizer.ggml.padding_token_id": "0", + "tokenizer.ggml.token_type": "b7a352ccd1c99d4413bcf452c2db707b0526d0e1216616b865560fab80296462", + "tokenizer.ggml.tokens": "815ac90ff23565081522d7258f46648c8a0619eb847a9c7c31b238a9b984e4ae", + "blk.0.attn_k.weight": "6fcfdb466f9ceb1229404ce4ec4e480751b8d00da12707a11783dad7256cb864", + "blk.0.attn_norm.weight": "6063317f731371864049c7704a70772f1eb632194201ebdc2ed0f8e483507c72", + "blk.0.attn_output.weight": "920f49716a1e2fc73b6794ec777947f1c122701e63ed302422ac89e90f06e9da", + "blk.0.attn_q.weight": "ddbcd7cde197e632564ac58e4f25d9e3a8ca52917329eeb6081eb41a797932ab", + "blk.0.attn_v.weight": "318fc02a189d87420f0cbf57f47f11e00c21ec1ed472ce0a2a895b44f7fa0fca", + "blk.0.ffn_down.weight": "aa71975b6eb1f4c77b03d2ac4a194cf8d95718efac741bb12f0f3ff79a27f9bc", + "blk.0.ffn_gate.weight": "42967702fa0bc738b88dc50007ace26dbe74a5a9e0978124dd093f818241a9e1", + "blk.0.ffn_up.weight": "5282c8788b086bd30f46525e7995a17464882a72703fd27165491afdd8bfd4af", + "blk.1.attn_k.weight": "cd248882e64fd2c3402c44790ebe12440133dc671b6893fdad0564c461973adc", + "blk.1.attn_norm.weight": "ba84e1c8fd30af6ec94208db4078befac8c921aad3acb887812887f3282ea2be", + "blk.1.attn_output.weight": "2efa3ef7c5666ccceb05e339b83ad680cc0d2c3ec78203f5da5959f23a80e14f", + "blk.1.attn_q.weight": "5106f2e255358a1303c22e8b5f0ec044852bb30a866c52cabefd30017a7a6b7d", + "blk.1.attn_v.weight": "a211a634a1a5df1d5f973645438be0461dd922210f9747c6b04e386c7f1ebe95", + "blk.1.ffn_down.weight": "37093afe48d32c578ec956c9ed85242cd000d6aa979e60526aafa10c822dbb10", + "blk.1.ffn_gate.weight": "469860819e9159caefb1aad0bc66db790f3393f05fd87b08e52256a7ed256543", + "blk.1.ffn_up.weight": "736742c97d35d1a011f9cafd3c0ce947ad559bb2fba6da73c816f6bfd0fa9aeb", + "blk.2.attn_k.weight": "92c219d92804d832ab404bd6dc7339c90877bb7cf405dd030c121f8b27757739", + "blk.2.attn_norm.weight": "61e4466069474b76b6d1e702566420eb669faf3556b00ff7b824784aca13a2d6", + "blk.2.attn_output.weight": 
"d2fb38a2b2171fd91caf037faa585a62225819aa232d86fd4f7f9d2c3c8a45e9", + "blk.2.attn_q.weight": "f6faf5cc6844e3daa4f9f68d90f5458c64879de68a7728860e38374e30c3429d", + "blk.2.attn_v.weight": "f340ef8f7341d987a6f37c0e9afe0aef5be67be00c0ce5f57612daf73319cce1", + "blk.2.ffn_down.weight": "c7be61a701d779860b621b143fb6365b607bf99ec7c0f153b07908ac8120885a", + "blk.2.ffn_gate.weight": "b64f0878187bd3392abfa4c3e8ad2f8b4c133903e54246747ff8f3b4639ad83e", + "blk.2.ffn_up.weight": "50b11c712652e90ee7428dbb45cffebb80662ac982bc72bd9eafff361b5eb5a8", + "blk.3.attn_k.weight": "2b7bcbe9ee5c9c630c8c8d7483887e78b73581016f4cbb6933db2a147a25f431", + "blk.3.attn_norm.weight": "0181dac7f4eee7252980323e8032cf339bef2046ce0a16c0fd72af7c98a8a37b", + "blk.3.attn_output.weight": "aef8843b636ce231da9e7c9acbee197883cc15df0e2887709324c6a50f16da7b", + "blk.3.attn_q.weight": "55404130fa10e81322d33eb378aa0de31a92990ce7730f1338c0ace0406bb1b1", + "blk.3.attn_v.weight": "76f7fb8040d82b957d689ce34fea2302a6640ad5bbaa0052ad2b7ebce270c33d", + "blk.3.ffn_down.weight": "648628933eff3b357c3729c33c5b1ae51c28e59b9c19acd1601a2ff7c5d5d9a5", + "blk.3.ffn_gate.weight": "6a588885d16e98d5f50ebed05af089154f680085ca9c97691e5b489088630a4a", + "blk.3.ffn_up.weight": "e12455a1d702f4986e1a663493e3d5102b367af74d45557522002a35d63ecac2", + "blk.4.attn_k.weight": "40d943380a8a85e4eab147934bf6e16f23cc8ab753f6636526382c074d182288", + "blk.4.attn_norm.weight": "4ab2c098983d4599fe540eef624c4df954adb7473faebda7471ef0ba4134814c", + "blk.4.attn_output.weight": "d14b91e40f58bf4a3c8c2eca0b12bb541de406574af39027d56f6c588a147082", + "blk.4.attn_q.weight": "e1224960a3562107488589f883fa32414bae41712fa8dbd47c5f3e3a7801452f", + "blk.4.attn_v.weight": "063f297bc4aa6e709fc32c4c32e35af7d07d80e83cb939b76adbba858006c03d", + "blk.4.ffn_down.weight": "f88a18020c5e1caaa29596895eb348e76ee5bfad27ed57651a86cd8cd1f9b5aa", + "blk.4.ffn_gate.weight": "48e7e1eed3fb52e92e61d3557dd0ec002418327090e034ce4322fd68542266f8", + "blk.4.ffn_up.weight": "1ca8a7aa17355b6ce0d9ad5539fdad3899fa47fd359c285fbfb31f19f47bf073", + "blk.5.attn_k.weight": "2bdf15f8e73d068d972380f25d207004cf0bf3b5bfa46946803ba6fba07d9175", + "blk.5.attn_norm.weight": "60448d7cde6e1b6467aa31bdea012e39cdb08c88081cee7d102dca4f93f766ef", + "blk.5.attn_output.weight": "f9f687d7c457537f9fca8a4087a59f1c3bebfaf5537b94e42c831a13224f7799", + "blk.5.attn_q.weight": "987db7a2ad68657a92625e1980effbb1f79697c2183f2b9f3b3a0570c51b0ab9", + "blk.5.attn_v.weight": "cf696891148f3e4783ad1d20f93462ae091eb8651c656bba9b662253b6263e02", + "blk.5.ffn_down.weight": "c0662b0bd0929136005fb9d691fdd9b2c33867d9ce9622339a6a456b720b059a", + "blk.5.ffn_gate.weight": "200bbdfab615d7a3a84719b6ced7751e3ce52757ef212d96f87798bc1de5e987", + "blk.5.ffn_up.weight": "df5d23e7e035fb1b9d163da7ddfdfe38da6a37e86e96534dc02ad20f011b55b3", + "blk.6.attn_k.weight": "c0dae2d272a7c5a2fa004bbb8475dbab362fc1f6d008e73d5a4434a9382ac6ba", + "blk.6.attn_norm.weight": "51c57ac8b55e04354d5dca6bb9c0cf4177639d3b038e80209e33036209688f64", + "blk.6.attn_output.weight": "229d97892c62f85bcdf431675250e01c976ad69ffa450b01fb543bf88f14a2fb", + "blk.6.attn_q.weight": "c20e49621821bd46ed156e6823864a5bda4f317750e71ab8dc54e44eb48cf7c2", + "blk.6.attn_v.weight": "53ceb1a2ee43fce3c7b5b33c58a9fc5ee7f44dc1c6f29bc9dbefc37582102dc9", + "blk.6.ffn_down.weight": "7923c943b7629d560a032d1efa210d1d75c6692140f1be94464ee7ed24f44ed0", + "blk.6.ffn_gate.weight": "57593d350361af753a6a39f53b066282634c0fb44f396f6f2966a574b01d8f8c", + "blk.6.ffn_up.weight": 
"327b6a7a387098b8899d3ded04a4d4e7c658ca61b80d4e7b17594be232721602", + "blk.7.attn_k.weight": "9ca48b87a10116fd8868e62b76f211d4bb91f166096be9061439ee2e1c3a5c20", + "blk.7.attn_norm.weight": "cd56cfcc4e2ad6b96e23ea7b0d32b4caf236107d99a0b22c56760b62e63c8cfd", + "blk.7.attn_output.weight": "7352b509a03cae2491ffc060e577d189341a0f861233f18c96f9d275dc4234bf", + "blk.7.attn_q.weight": "2b3791c8c008c33ddbe12bedba8191322ceea2dcce5cf0eb7a93d40ad254e672", + "blk.7.attn_v.weight": "3ae721d52466487a3d48150581e57f6d64ea1e83ab929f23b28c3d777422eeb6", + "blk.7.ffn_down.weight": "3b6fa8ececdb3c34af3a5363863d6f94289c1c95bf47fce3a3ddcf184c5f0848", + "blk.7.ffn_gate.weight": "dbd7df6c5ae5eb4adb859f0d36453813a4e289a359a1ba8f72d67fcbf21c3e22", + "blk.7.ffn_up.weight": "de68380a334b4c5cfd4c318b0e9854aec59bd79aa0f0c30af3f56414f83482b0", + "blk.8.attn_k.weight": "7303c4e4480abc72a7ee271811311199245fb5c2ea27a2bd3b8cad3a53a03c27", + "blk.8.attn_norm.weight": "2e3d1921898d1b943ce1a1b6818546c8b471d6d542da24f51a8b514b8c3dd4ef", + "blk.8.attn_output.weight": "30421520887b66bf97a18dbcdc283bc8d0b60590b612fd638a319a6eae923227", + "blk.8.attn_q.weight": "73e064d5433c9b500068a1c31744dbd53f4ade298fb450a0e8c97f62cf1f8a8d", + "blk.8.attn_v.weight": "27e21f8b9a9a8533e8178ca34a72aa1d786393d57302b7806dcdf3e51de511a8", + "blk.8.ffn_down.weight": "bf694bd8e00047982108000e7b3dee7b225db8b19abc595e5697b6bbefd92e7c", + "blk.8.ffn_gate.weight": "d55fdbf8606d9141b774b0500c58944fd1253b9e69d1f765eaa9a680b9f2ca40", + "blk.8.ffn_up.weight": "1ae3f580655e7c8e8dd6c34fa4ac574fdfc5e3f1a8536da0c5442d3a2976f0e7", + "blk.9.attn_k.weight": "b18080626012d8aabcf78542d6c7bf31c712bf55a70172fbfe173fcf34481036", + "blk.9.attn_norm.weight": "2e3620620dc09998c6d3063a7d5de5433fbbae8c11e5b00d13f145d39140e162", + "blk.9.attn_output.weight": "69c3c0e27ef1c0fc933eeb7b612b70909f18cde238873c0d576a2ba9714ef174", + "blk.9.attn_q.weight": "68330e5aa28a28873c9a6e67f032186ef651df2df5844e0f27094ba349fbe4ab", + "blk.9.attn_v.weight": "3df8d45a102be082d0793a51cb82aa62a43cd0e9d047ba4115ca0f2414b39325", + "blk.9.ffn_down.weight": "1d6cc162b73745b135b4f040a0aac3c06d5135a3dc5b2421e7ee2af48662fd7f", + "blk.9.ffn_gate.weight": "034a9d40fb1e32b534b45f4bccd65cbe43c4a6a3f5d01132bd245ca0005de5fc", + "blk.9.ffn_up.weight": "c838c38d0e1a0ac0da17eb2a66023ed31929f07d8fcfe1cc546df26096c91f0c", + "blk.10.attn_k.weight": "a78507cb72f744b86ceaa032596e74e5571c822d0226d334881169addb32cbd5", + "blk.10.attn_norm.weight": "35f48d0b28ee0e6b4cad4e983925737562d64824be5b168b3e26df3d6b260cf1", + "blk.10.attn_output.weight": "53712db06796de39b131323e7abf9a58551b6d52da6db66a471580386d396252", + "blk.10.attn_q.weight": "efe08429ba196026b81cd1c471e1c7418afd9e966659feb3936b674aa0803b58", + "blk.10.attn_v.weight": "7ec6055e134f89da0cbe79ec9f13ef2e442ac584b1f03c3e13e7d0cdad0078bd", + "blk.10.ffn_down.weight": "37e66af4bcd1f3079e841e892255b8255070655901864ea3a8c602a7f681a640", + "blk.10.ffn_gate.weight": "1825282bc34830d371c6edcc3c1e73e6ecc1e10f4aea0122dbb7acc1d6f7b1bc", + "blk.10.ffn_up.weight": "819b3b276a4d4c14a35ed6682d5ef18a5e8ed468e5ce3f12e8c75ec18ac20ec4", + "blk.11.attn_k.weight": "5327e6a2af82dfff0619a14971f5864a15553c36fead84e1af42c7630f2729c6", + "blk.11.attn_norm.weight": "fec363b3c4a43036d2c635fb8aa9e122dd87ee79811839f2f6cd955be3373e7b", + "blk.11.attn_output.weight": "ccf7b38f18ee8798b8a6a35018e2df3eb3e007de62876befb68025dd66c79763", + "blk.11.attn_q.weight": "da8c4a1c824ffe174e39f126cd72f7ef83c56aff1259d452a1212de80f98f5e9", + "blk.11.attn_v.weight": 
"d17ae6bb77f03982b55d341eb67acb5969e9ad3da5994b96eafc09793dcfe3a0", + "blk.11.ffn_down.weight": "a6bac521e2791345f22c57205fa1c2f2f687794dfd24d0e98d50ae0d0eb6088a", + "blk.11.ffn_gate.weight": "5ed902c488cb51ba5635f3df08258c5f84f31a679a00211ea5f9d8b824ef6d9d", + "blk.11.ffn_up.weight": "ee9f1437eb890d2cf9df2574afa1cecf20aafdd847cd75b152d7eb74419afd34", + "blk.12.attn_k.weight": "5a069c06e1019b0f889088e67458f7a11ec77fa190ada6069e46211f62219947", + "blk.12.attn_norm.weight": "194d7e5fcc8c49aea62daf1940532419cf3c505afdce6be377286b677db5db8f", + "blk.12.attn_output.weight": "6534995fd4d6fecb55e317add4b1723aba4d825e1e9471d0b08813dfdc247176", + "blk.12.attn_q.weight": "4ab51ca519b5995581fa34f846276feca3b907ef2b51f192f6cc0b3263c3f5a2", + "blk.12.attn_v.weight": "5652ca3fa81ef9a1ac1543d71fc6813f8517f8ec54b25c701f6f98061614830f", + "blk.12.ffn_down.weight": "4b2c263f54c88516b8eb273bb8d9615b01c5c8b484dc70358adb91b50b300edd", + "blk.12.ffn_gate.weight": "8f50c3c3e3e8568991d6c1b0e74b500cf4f208e7700bbb8e87c3f6a6d359b6b5", + "blk.12.ffn_up.weight": "1c1a581fec1fbe959e1427fa513f400100b5e1ee9d83932630be9905fb49c231", + "blk.13.attn_k.weight": "efd7a38c46f08d8376d82974f33c644e3a02220e142d63b1704718699a8a884c", + "blk.13.attn_norm.weight": "d28fa4f1bd75abbd063b0e622e08f579c89cd0c0c5ce63c1952ec9f944f8ee13", + "blk.13.attn_output.weight": "71e0068a639288718bdb70a6cfdefd50bc8b3ec3993347a65129e70001ca5827", + "blk.13.attn_q.weight": "b97077adc92cff07a2e07d80ee38f214ad8713571c69cd5c70ebd43dc501ac87", + "blk.13.attn_v.weight": "79b3e2749ab4b459c81e96e322b215f1e8af645eb346e176c326bd00cf6ed2fd", + "blk.13.ffn_down.weight": "9f8687d11effa1db7cfecf7bec5631734bcf2962aad74a9f519144491e08ec85", + "blk.13.ffn_gate.weight": "7d14dfa0543852e7777fe8fff29ca533744cbcf1ebcf10067e5adfc4eb345e65", + "blk.13.ffn_up.weight": "852b9527b97fdab211ff3f832a660ee1d93ccb56906144c50f01319a6e8ee615", + "blk.14.attn_k.weight": "79e926b20f36f66d58226cb358881f2f68ae7b468787d33cafae5110287a14a0", + "blk.14.attn_norm.weight": "97d481b63deb0df6142c2c6cd23043720c62eb609e390f47a7113751c79974ec", + "blk.14.attn_output.weight": "aa6e94d7176d5c79fbb89b96e5f13ce75702ce3dd23ee52986446da436a6c3d6", + "blk.14.attn_q.weight": "214becb6d1bb460da9fb8ace0f99b9a5afa9edf7aa7acc19606c7401b11d6305", + "blk.14.attn_v.weight": "488b0e6d7f1a7a2ed0972aaa6d10ef9c775ee5373460324efcf5b3e3da9311df", + "blk.14.ffn_down.weight": "29c7ad16cf9542e30996a1a01ab95b844533b28051f04cc7949c371afb796471", + "blk.14.ffn_gate.weight": "b7ef208f2b054803665b377f5a5980c122c026841809cf855c6ba06d1c3a885a", + "blk.14.ffn_up.weight": "76a5cc28100748d79c4398ce7b9176aab4d661548b6293a82f99144812e5b70e", + "blk.15.attn_k.weight": "a6b8f9e98ab878fa7ebc5d080978ebf2d050acc2ab2fa8ea9188eb10e27702c8", + "blk.15.attn_norm.weight": "a26d07a9752d6dccb68e3a8a2a49fd0752cdd0a415e05547819bc37d9ba63d5e", + "blk.15.attn_output.weight": "c63616c69048ccbee801e05be4f56d21fda21aa0cc470f41d57c31b4d9283a4d", + "blk.15.attn_q.weight": "fd595a67bf96c6ba16eb148a9d02fa52fa3c1d33ed10be28a08f851409fd6e64", + "blk.15.attn_v.weight": "1c5c9d33fa07c05d5f4ed0032c6c4aa83d863f0d31c94a66109d239dcd03cea3", + "blk.15.ffn_down.weight": "585ea62ab8aff7d7d212ea5c1a03226fda6b68370c890b776834af70c948dcbc", + "blk.15.ffn_gate.weight": "a13c63f86f879b03a573d5dd2a25cfd1f4dc73e8132e6454ecc23e538b4cdf6f", + "blk.15.ffn_up.weight": "f7112450f57c12fcd511f049e0dc0b541625a107a7901c3261ed9e984299f65c", + "blk.16.attn_k.weight": "2d2c8b11dd71fba6d1c106aa1673c113a5448653cca7eab897c8739212ed5003", + "blk.16.attn_norm.weight": 
"95c2ec7be9469690e18a9a1779684acb3e9da44b13e263a0da840305646fbf8a", + "blk.16.attn_output.weight": "31a65046e677f54dae654ded4e733479fcc0f7283d83076b7dc7cbcae8528230", + "blk.16.attn_q.weight": "bfc6292b9c6d49b7118d08060242a138182eb182d136ba5dfaf469437c16081d", + "blk.16.attn_v.weight": "68f81d037340217d87c7853ff4d6edfbc46d9e827ee6d5bff7c3f6238e3a95ad", + "blk.16.ffn_down.weight": "bbd6629691950cef4d5113e1c6670e91b216a9b872cb92cee02dfda4d6c4f7b8", + "blk.16.ffn_gate.weight": "63cb56f282b7401ed6c76e5bb6fdf1bf68a64f9af0c82c014209b55bcb5191d0", + "blk.16.ffn_up.weight": "b54f39a2541063cbfb6f713aa81c3b69a04100e999aa2ebbeec195dc382eceec", + "blk.17.attn_k.weight": "3d9ba49799cc56664ec30a002bcad61eb651294212a68c3ddb573eb042aef5a4", + "blk.17.attn_norm.weight": "42ee0db4b9d63257bca0012a30b12737ead1caafeb5ed3d93c8f48ffec4b46de", + "blk.17.attn_output.weight": "a38fd100f05c9041c592bc739e287de0b10d08ef2bda41a879225bdca9002f71", + "blk.17.attn_q.weight": "8a3bee285b0180a9eb35662e449ee4cbe16d992bdd48fb3a94bc4a347728cfa2", + "blk.17.attn_v.weight": "d7f8f1b8b863494ed4392a1656775912e9b264ad36016547b12e832a1d6757d6", + "blk.17.ffn_down.weight": "bb7ee58f61da8630972e25b621996fbe8ec06f4dc9ab1e268ab5b120c526ca28", + "blk.17.ffn_gate.weight": "6b652dbf167fee09a45ebfd78d500ff6548fb2756dbe5343ffec3f7e6207179f", + "blk.17.ffn_up.weight": "3b67f727e55e742715de978fab80457781e7a3762bc48f79d13b45dcb8de664c", + "blk.18.attn_k.weight": "ff7fe57c57b90c6fcc0aefc39ec24593c3a7d1ea1c23770480075a015450e0f5", + "blk.18.attn_norm.weight": "1d40faca082d2633ef0ccf19e121870dd6c7c3e2154607c7f3543fa96e99cb2d", + "blk.18.attn_output.weight": "9adfecaaa397a92db4687efd5fcabfa0daef9e6b0493763b7ff5ebc185c43a6c", + "blk.18.attn_q.weight": "ad1803eb9b291948639277afe981e666b07167eb3fcae903ba5b73bf86d8f50b", + "blk.18.attn_v.weight": "308cf23399adccf27401a4ab60d74dac6fb9d4cd4b9c5940d9145118d1881b34", + "blk.18.ffn_down.weight": "7de4ac9a561fb580619b745687dfd7ca8a69ef70471dee978741b80e9ff7bead", + "blk.18.ffn_gate.weight": "0c66970f696b33bd5ee8f1f2fbcb41fd78fa5ccabdc927e11a4d5a4089f19c69", + "blk.18.ffn_up.weight": "66a42e988e8a1f468fabf976c48e9e4bb045eaac6916ef16555ac101cd674abc", + "blk.19.attn_k.weight": "a928ab50390bacbcebe2e4b66922498134ce22d7b93beaa87d6cf4ab52eb7174", + "blk.19.attn_norm.weight": "b4a02c55b46c2a96aec9c64a254087cf48e6c1d4b6f31782c77a46fc4daebad1", + "blk.19.attn_output.weight": "b768319c641dff1eac5d1f8ceb960c9899c795bf2b24c1d6bf70aa24fda45f77", + "blk.19.attn_q.weight": "79ef3f57d187d3954a26362096e1b6c222d76f537dff73e034d6e9999935b8bc", + "blk.19.attn_v.weight": "ce13d6b13e24fcb2d5bc6a2662e5bd295b31b12db10a6d0307f86cf29b8d5001", + "blk.19.ffn_down.weight": "cf90d7e2137482cfd50934a8223ad774621d08554969da80a9712df5e6227eb0", + "blk.19.ffn_gate.weight": "71ce30150f003b6eeb3bf7464e05b6ae615f135110d8e47f0a47fd973e537c0f", + "blk.19.ffn_up.weight": "7f92aca0cc29866633feec701ec01a85a8ee2fd4e2b9630173a6cffb1d9d50ee", + "blk.20.attn_k.weight": "a2df23159d6fb74ef28e14b61028fe8b00a693a2fc9234a980be74f20b958682", + "blk.20.attn_norm.weight": "c6cd5f1b096fc5efa4eb59ca1c8c4bd28730f3dcedd59a63601663eccc6724ed", + "blk.20.attn_output.weight": "896a8a166d0f006d4b09867ae4345426303cbc3fb13a18d3d4e1bde00f16dbdf", + "blk.20.attn_q.weight": "01eb79588fe61baea0da43e99f4dc5939590e1bafd01e12dadb8326f102bfea2", + "blk.20.attn_v.weight": "bd39630fdd5a7c859ac1addaf53e63faf524c3f32f5f4896d86b6e746b1d5c06", + "blk.20.ffn_down.weight": "0304a5d39957a0e3f031c4bcc4549a135d396c8d97c8d276fd1c823ce86560c2", + "blk.20.ffn_gate.weight": 
"117b79d595b1dca0c8b37586beaecc4d84411507276212dc286cde7fc36c9bef", + "blk.20.ffn_up.weight": "6e799346db145c125f01783539749d3828fcc451cd4f10c5352f047a47e28714", + "blk.21.attn_k.weight": "1c37e4c0664147e775bb006b226b9553e3421140cd96288ea755f81731ab80ba", + "blk.21.attn_norm.weight": "00ae783a29000ccda5e4bdbff03df0752fb82805dc3f9b987500ebd80714476e", + "blk.21.attn_output.weight": "7588b84f9fb19f15095b5265c60b4a4e7ae74bcc47d4607dfa5d0bfab6f136cb", + "blk.21.attn_q.weight": "a65f1c0dd06d45bb97532d3e932689c1eecfe7359089b39174a96a149335cbc1", + "blk.21.attn_v.weight": "4220b77e7d5e8709b4eef33a679b5dad11f297085ef44c9977f9e54ef08f7a2d", + "blk.21.ffn_down.weight": "b8c082a0530d4b5328e67db0df84c5498f2af956de23c639fa0198ffea853950", + "blk.21.ffn_gate.weight": "cd1b656ee72d00e9835ef667c19ef89a88de261eb8eb7c0e936e0f9ddf83ef9f", + "blk.21.ffn_up.weight": "dc445f73e36ec7a3bd86884186b728f8e0187f32848c3b8b69d4d41f8571bf31", + "blk.22.attn_k.weight": "e37cf0b893ec8b9ee8c78dd139b8d9c45cb997a3bc0c3d93a70ca1c3f6af8859", + "blk.22.attn_norm.weight": "248a27838d3c46cc03a5c312facc84e2e0e2c990ef8401e93da25918497f88d1", + "blk.22.attn_output.weight": "fc191a18f6d18332c66761f7ab28008bfe295dd1f5c8741a2488442f9e00d0f5", + "blk.22.attn_q.weight": "4b193a2ab8bc2b085db18f2bf3eeba26e02b537b2cdd738160c8f14b165d0f5a", + "blk.22.attn_v.weight": "7a60ce5ccac7e045e55ba1e1e85bd2a0f93f8c781daee96c5223665e22f0c666", + "blk.22.ffn_down.weight": "e0a34fb4244e2c7168f3dbaa1904c15d339ec39999cdf27128bbaf619ee0a237", + "blk.22.ffn_gate.weight": "8bac872d4b8549c8812f927efa309f1792b524f33601095fff61b826de5a5615", + "blk.22.ffn_up.weight": "b67fa2b94dd901b6ec64c0853ce8ca2d86fe9cb1cc6d2f15fbbbe0e691c0c648", + "blk.23.attn_k.weight": "2c32e66ad01942b819ac09a197c71579fe66f02226a264fdd72ad1e02c67a27e", + "blk.23.attn_norm.weight": "825fdc94deb439cb93c713eeb077c1052b90ed658d6d464fc4ad3d611e911d48", + "blk.23.attn_output.weight": "95ca6707a95b8750b0c7c5d379d368f0f2e7ebef631954e7d4d8ec0f41f13a3a", + "blk.23.attn_q.weight": "6eccc84faca5fac015d1b26e2854501edcfd292a302228fe14cf99f5eb59a34b", + "blk.23.attn_v.weight": "b343ac3d226040f1033ee049668aa1d89b1774bc18431965682e5dbdce78ccdc", + "blk.23.ffn_down.weight": "9fc599befea8d3b1e342d564a110074f66d2542df406c4b90b6bdc5828fbb2b2", + "blk.23.ffn_gate.weight": "488556c1b0c9f0b20b0c99b4bac2e0f4046b81edb601d7b91e7e5b3bab47d667", + "blk.23.ffn_up.weight": "1088e291d7008dd9c7c2dd6830af686a8a84b724d123a016209bd5156d6898f1", + "blk.24.attn_k.weight": "a923fbe35e61e009a53927d7828818e0592bb737d6a1106c4b0b5a1efc367e07", + "blk.24.attn_norm.weight": "9b51aaaa939cefafdd9b13a7e5b74ac7fa2d603427e55a16a909d6f3f353750a", + "blk.24.attn_output.weight": "1beb2baba56f8409466434b037771248c2f620ec5f53e15f44c271d5a2d9ecf4", + "blk.24.attn_q.weight": "4b0194fe5bfae0c6bf6131dcf8cb6e2b994f6ea10b27cb03574f0f4f8cc0c950", + "blk.24.attn_v.weight": "6ac34b1ab0f66226d85bca1194a7c212cd93d384ecbc8b8395de48aec0970a61", + "blk.24.ffn_down.weight": "5508f74cb732a662c2936b32ac5e90742d172b9f961a747b0e5cba0e5906a89d", + "blk.24.ffn_gate.weight": "095e39b8584403835f9bb1ac33e0e81f54175575e4800273d281b845bff381e7", + "blk.24.ffn_up.weight": "2d43ec21637dda12973de367b0113ee9840b0d815bf6fce042f7c3f270b0b530", + "blk.25.attn_k.weight": "9e2aee029f3d2c7f67dfc7926e72c8228fb978382c8e5a4701bbf82c93801419", + "blk.25.attn_norm.weight": "220cd7164fb4cdbe22d26058e4153b26c27c7b5ce2bec8e95bf2c0ea08d23103", + "blk.25.attn_output.weight": "a17f4a5dc6aa51f03dbd75602d98e9491767c205cdc2c3a5f8667fc54bbf7c64", + "blk.25.attn_q.weight": 
"f60827496835c440c794bf57ce9780704d10a59d8229886bf75ebb18900ba4ef", + "blk.25.attn_v.weight": "9cac217e9e9f4f4c85f14ee51165a77c580165bd4a34b202389169bbe61a1ced", + "blk.25.ffn_down.weight": "a0f36949b663e80849581dfb71e7babcc73580793bbcb0c80ab26d5a6e000359", + "blk.25.ffn_gate.weight": "df4d1be4d50d6afe5ad3ef0d0e0fac76a33e85c963dea769641d612dd53e7d13", + "blk.25.ffn_up.weight": "992da76be762632e25ebc5ef4d03728eece1b43f7c4e31827df19ca724aea694", + "blk.26.attn_k.weight": "34199ff856ac32a500c754539d070258574192a34ecba87a182897cb59fdff52", + "blk.26.attn_norm.weight": "a8e9dfb2dae5d22b5c0aec5f3675991c0e3c3e6a44153db2579136b73f456e00", + "blk.26.attn_output.weight": "1c4f257ffb0d7db0f11cfb275e38b4af736917b43ad82de1badce3f1d227da4d", + "blk.26.attn_q.weight": "33d55786274c2e718cf61e8fbecf3dfa5ee0c208f0b716d42b061f55459acb3c", + "blk.26.attn_v.weight": "684b636939cd4ffcfec5a6238a0790ffa43d853c95783af9b9e8275e74071a7a", + "blk.26.ffn_down.weight": "89d0bf066db154e6d312b5433aed1714f6a28b40f4c52e3e1530ee07703303c8", + "blk.26.ffn_gate.weight": "393d649bebe5e2940e1b043649f6c860b4b8b9f380f30e9da1744a830f358156", + "blk.26.ffn_up.weight": "179edc85ababd9d8440cc6093eecd1004290aa1cb96434b26ecf7585b6cca17b", + "blk.27.attn_k.weight": "334841445a7f1e14731b08f56eb0b1f0938c63823d28bc6d078c4c5f05b36f19", + "blk.27.attn_norm.weight": "57344471bbda2e9deffdfdb2dd05a07aa47f8761e24de53525588639145bf551", + "blk.27.attn_output.weight": "506126af9ee54b535d49f97e36f630e74834f480329f098d6d62e96246d8d65a", + "blk.27.attn_q.weight": "dd984df1acb4783849e25ba7ae378bfd385cd9efc540fb798cd5bdd873f0118f", + "blk.27.attn_v.weight": "b4b3fe9a4455d34c297ff20a2f537b647cef424741d840a747b265f23d320ac0", + "blk.27.ffn_down.weight": "621fdb185ba0d35ba5476dae73d2c81ec1482a0e878d5bfd5c3b29fe837af013", + "blk.27.ffn_gate.weight": "e4fbab45f2ec506fa374103251a0bdb7baa6f576080bdd796f3e9db92098e08f", + "blk.27.ffn_up.weight": "a0c57e463e988002bbd6a6c6792baa21a65e6f89ae303a2c301951b0ae6e4bbe", + "blk.28.attn_k.weight": "bac36cbd52ec5056841663865e1291ddab4b47ef9a2544dd285d4503bfb0e4a0", + "blk.28.attn_norm.weight": "5774a9df2bbb2e86d1f70179c7b92d81e1f401160148b3328fb64db6646a5425", + "blk.28.attn_output.weight": "e8712622d1569557000c75f26c3f55fad267fd300463c2c2cfe3afbfa1c8f908", + "blk.28.attn_q.weight": "11677751fddee52cc739699c02836f7be54d96038be4240be5d4f53d00161608", + "blk.28.attn_v.weight": "e5ee459b8958d65e1445997b9aa1e90e2f5d17761ebcf5357313119a45322507", + "blk.28.ffn_down.weight": "3934518f9f85292da8475fe38a8edcbfc4e24ac56c351b472d6351f98750871e", + "blk.28.ffn_gate.weight": "6ba735d57e98d0847e487f25ffaa25256deaa8abec76f428cb70bd9774279d83", + "blk.28.ffn_up.weight": "977fae6e1e5353114fc645dd98429464749758765cbc6e6457593d596e57850c", + "blk.29.attn_k.weight": "8122a457307d580ad6f1e0acea09a2f593d97f595ba0d6737f5fea16d2433642", + "blk.29.attn_norm.weight": "d626f721e05aa1202439b01027031d4caf1adace61ed37870a277cb6297c77cc", + "blk.29.attn_output.weight": "7fb7122ab1b6b1e6615ca746897da27bc52c92cb70d3147183cdde61795b72b3", + "blk.29.attn_q.weight": "be43e94ff6b6e391024dc824101efa0ddf4005d5b002ac26cb03765c0c73c2fa", + "blk.29.attn_v.weight": "af93c85ebff908f74f9935b81bde0516ca487c84139868a1ce079c3ae20036b1", + "blk.29.ffn_down.weight": "39dae12340ed3120bd19c495fe0872b559613641e41fde69d02d8631900b84c0", + "blk.29.ffn_gate.weight": "36fd482439840ef197c9f3b8905d86acfcea49bcf018544106ca465d4bf8d5c7", + "blk.29.ffn_up.weight": "5243fbdfdc1e2a1dd84b6210a9869d18a014db9088897e345240cdc99990bd5d", + "blk.30.attn_k.weight": 
"948f263616bd3788b2b968baafd69b9c5bd1b77578665f096c4b7e247b4cea42", + "blk.30.attn_norm.weight": "e168df981e744874ff303faf2eb470e5f6868c2040ba5f383f6c5148669975e7", + "blk.30.attn_output.weight": "4cf0ccca04b792573b756655a24fc89cfb1f272da8305633f0bc66ef14990b93", + "blk.30.attn_q.weight": "21e07d6cba6c50d65350289258209717174a13c42be57e8141d69712cbaf32c1", + "blk.30.attn_v.weight": "65a8ca29c7237b3182ccf03e2fc94e84f9a53d0e160fb679ab401c853170dd9c", + "blk.30.ffn_down.weight": "8b00500a6d00d84058f6658ee1d6f06fb4fcae2f90d4341792259362923b3c13", + "blk.30.ffn_gate.weight": "5bc0e19ab7a31b50ac2118ad1b36e31055271a322cd8ff661d47c3ac0210703c", + "blk.30.ffn_up.weight": "f37a0561955725bd59ee2d064fa9f4e00a12a1b620b624db3bc3add5330bc321", + "blk.31.attn_k.weight": "9a5663edda227f5d87533897146764f8e8a7481b9e71fae197c39204f8463221", + "blk.31.attn_norm.weight": "060a4f438a1ee5e220b5b5278ad2f5c085a428bf38c515766781815597c87529", + "blk.31.attn_output.weight": "6ada5d3cad9dea4780ffbb43302bb6ccc2f24eddd0fc4f5f84c9ce0fc0c6e5dd", + "blk.31.attn_q.weight": "bb5d08c08603907981ad388d5d8b70fcc9b98034ba264b8474c8890cc0297af0", + "blk.31.attn_v.weight": "e01b4252ea9c6a889c32b21144b441a347464d04536ef4f6572425be55759796", + "blk.31.ffn_down.weight": "8ba4d679c36e93ba65ba03180385ef35ea86b3b7cdf2fded9df59369f1c09630", + "blk.31.ffn_gate.weight": "e5b41dc93645f8b5e8eebae3ada3ea43a18f97ce2654228655170b07b463ccb0", + "blk.31.ffn_up.weight": "25b88cdddc8b547af294ed107d3d1312e90b983cae87936fa6062ecd8ea02539", + "blk.32.attn_k.weight": "4bcf86dc0858c8ca2fbdf6aa76674d43eb698f78979fdc1a38f556a7af1facc4", + "blk.32.attn_norm.weight": "cdcc12f3b8b9773c6722736bfb748a2729230b21478cbcc4104859d3148df815", + "blk.32.attn_output.weight": "d43f1196822995ed89a9365c97054753a8b30ce20b6e273c8edcc42673a1e141", + "blk.32.attn_q.weight": "ebf2972bb3865cbc5be4840113a322089752038344beab2a0122c7cb4fb399b6", + "blk.32.attn_v.weight": "714db81704ff34fa137512903c1013acee7877467473e46600728b9240582eb7", + "blk.32.ffn_down.weight": "2cde3da1258bb170a79d5d3cdfe10c86a71eb34b77da46b74c5ed71e7f4fe274", + "blk.32.ffn_gate.weight": "c7e1ed792532613ff9d4e5834b6536e2e0f47df2303bc0fdaa90aac0c1f4e8db", + "blk.32.ffn_up.weight": "d8d6f13fe66a716e28f79101a29817f0c0d6f99969a6f017d51bafd1a16c600c", + "blk.33.attn_k.weight": "a0a28f6cbca88da00cab2ca37094d9b0503bf9defdae77b91895b911c408cbb6", + "blk.33.attn_norm.weight": "0251200c24cc8445607ace6dc8c5aa0566567997262b7cca53a11ac23cc564b2", + "blk.33.attn_output.weight": "b2423205bdf6a1096d43c44d8d12f1a84fcd4e1bb70fcf6dc8542b8b8a71a13c", + "blk.33.attn_q.weight": "00b425c3ef71065ce5e0234e702bf38143b4952da78a85f52ab2c2e3073d97ab", + "blk.33.attn_v.weight": "035edd2335df816c42c765a5e66b9d9b9e15a822a8dc1863508145499c942c14", + "blk.33.ffn_down.weight": "4894a923a3db75bae4496ba3ce5f28796ad31fe33996a066271fb8654964310e", + "blk.33.ffn_gate.weight": "8f6c819b8bbfbe3357fae89e1ac5a3d58be85b3b04be3bacf7b62775869046ff", + "blk.33.ffn_up.weight": "257c3544b5b544fd5d839665bf5caf107a329b59dbc3751efcaa24ae63c56179", + "blk.34.attn_k.weight": "b6cd8bba892e38dac4a2ebc3ba1bce49e71b967fc436fde30c6d76f54a18935f", + "blk.34.attn_norm.weight": "2b3c8e60a064cba9955752bbbbdd92c71ba5c2f1bd721097bdbe88b5abc68787", + "blk.34.attn_output.weight": "8cc272551c9aaca9db5a660c6927bab94a0243d74a30b2bc165f06bd577714ea", + "blk.34.attn_q.weight": "74b561eb4792484e6a94b58fe2583848c3ae28ff2f1bf3d02939a0cfdfa49990", + "blk.34.attn_v.weight": "dba19e24ff05154dc5a1f55c023729303a583d13d68732ce22ea74d4410dc8f0", + "blk.34.ffn_down.weight": 
"76eca5dfeb274c35774e0bf9f22ee420ed9085c8e99aa2cd5a236e4918b44c61", + "blk.34.ffn_gate.weight": "9af0862d5fcbc24732846488e653db8242a467765c0cdbc00332b3a40256b4a6", + "blk.34.ffn_up.weight": "2a03126bf73587eaba99ece2066103d12e47bcd4ce30ff6c17b2f383b81d40df", + "blk.35.attn_k.weight": "52513fc0cd4e997a842729af7d21dd09399bce0a339558374738be266d0fa2f0", + "blk.35.attn_norm.weight": "e5281fa911964263ccf1630b14762edbd41d0b9472d6ec695fc600fed4892c35", + "blk.35.attn_output.weight": "b391d6705d5dc6f48326b5fd16573f679edf64109d86fb729a498819676590ca", + "blk.35.attn_q.weight": "d16446921966db9b0e0539626ad22a2511ace780e59379d6a4162d8c5441440b", + "blk.35.attn_v.weight": "9d8cdf23ffdb0c5c74106843390b94b24c9f33ef0eb9998d39f78c73390101ea", + "blk.35.ffn_down.weight": "938eb6301f7bbf162d7dd965682a5ed11d0a4a530c6fedd7e5469ce80012fc17", + "blk.35.ffn_gate.weight": "5ad84f5a0c8edcfea1ecf1a3e3d21d85ceda0c4ad9e3c6ca68885eeff8ed3c2f", + "blk.35.ffn_up.weight": "1c4330d9dc71bf4c98812c34356c51f520f47610a534152aa6d29284b758090d", + "blk.36.attn_k.weight": "ef720655e5ca2465f13db2dfc4732fb4ef2c9d53acde52f514fd4f301e974081", + "blk.36.attn_norm.weight": "88f4b9310b3c8c2644e3029160cd35678c79dfa59280430e03f5c29a6fe84a58", + "blk.36.attn_output.weight": "aec6f915fffd7bb72cd783273e871b4f09605950089d45e72059d1316b6c4b01", + "blk.36.attn_q.weight": "72f9408a2405d42f8db6ce5fcf1d26a3660b6f225fc60e77d0277109cfcb82ed", + "blk.36.attn_v.weight": "0f3b3d851dc44b3893ef53f6cca5b4acc9658bacfe1cc2d13c3d704ddd409b67", + "blk.36.ffn_down.weight": "470aec48ce8c5129a6654d9fd26fcae72776f9fc1429a8bb05818072a876475d", + "blk.36.ffn_gate.weight": "7f5f296d09cf55679767b5d15de3eff489c456782119f25204be4b1647f18dcf", + "blk.36.ffn_up.weight": "b7ef74a1f7ffb4982711d93f1787be3a70edc3d2358d5203c41d8900508037d4", + "blk.37.attn_k.weight": "c4ffa5412e4ff2dcfe1aed991c1f54169fd171a4c7638e4b9f21a1ca64c5e1d6", + "blk.37.attn_norm.weight": "4eb6c888d841cccfacf5b963f8611120f6ff24b84af0b5714fd9ab36dcda422f", + "blk.37.attn_output.weight": "db2a7bbf9682f9f6eea672dae8e150738f1bf74dbc80edc7022017a3f040c8ac", + "blk.37.attn_q.weight": "e38c0462aff139afcbab289189823527e453abc9e541154adde5e7af88cacf0b", + "blk.37.attn_v.weight": "952eb2492ed452a72f96bcc12d4b2affad9dfdf46ee39ce4a5d7b57a5dc301e5", + "blk.37.ffn_down.weight": "25f23a8fbc44febf6dc4848fd7fe03a580e2822bd3b3b5a51f4990826bfe3e4e", + "blk.37.ffn_gate.weight": "707da5eb40118b035305d3262444382351f170a20a537386a70e90c5a83a7817", + "blk.37.ffn_up.weight": "d2d2ba5cfc4ef47338dd7384219e22bf030a5a2209e0354d88f5bbaaafd20e87", + "blk.38.attn_k.weight": "abc4bb189dedf7ce661e79028427623a4f91ac091c2cd60e31b58bc62b1cda71", + "blk.38.attn_norm.weight": "9f4803a7d03fd40fcb83d85f84eb1d5682ea4e5bb084f210c02850675d804c3d", + "blk.38.attn_output.weight": "77cb66007f1a41df7135d0e7f900ceb499c2f667dfc3f1a6ac01a3203bbd3ccf", + "blk.38.attn_q.weight": "d94a8b26cd375bf2bcaa76597e314aa8268ee50a479d00931e5e0e021feadb5d", + "blk.38.attn_v.weight": "660c907888bc5016dc69b7d35fe6f55c7ded697c93be0e2d332a2f17aff88758", + "blk.38.ffn_down.weight": "6f06173bae5b00ffaf88ef383619a8b9c6a8d0d5c6494695d17f6c1de1a68a13", + "blk.38.ffn_gate.weight": "89f99be149d03f116527bfcabe073c50001c874de40fb6e817f6619027f3cd05", + "blk.38.ffn_up.weight": "8d57557c8d5e2d2688b73f01dddf1ce8d5194990cda6358153320aea88aac7f8", + "blk.39.attn_k.weight": "21be09c988b46c8393e6c2ec9230f3b5136eb7607dd1953ba92d0811c2f0dd75", + "blk.39.attn_norm.weight": "ba7c1912dd1c4e2d16917201f62396fd0600e4a451137eaddff255548c209abd", + "blk.39.attn_output.weight": 
"acfaf4abb3fd27fd899b5563c3877f176b597d8f6cdb2f2fd3f3a0bd4da15ed6", + "blk.39.attn_q.weight": "e8adbc140d4c8f0db2a27ca584c5531d5b1e080555fe627e34d80d0814a92bed", + "blk.39.attn_v.weight": "92f96b0e1f724e73a0f90a76c145654418844c04a6d4b14c05eb5af8a62bf8dc", + "blk.39.ffn_down.weight": "4d9ee7c65fc16fe95d10c47b79ac6a525741947600a64b5fcea5d300a82c50de", + "blk.39.ffn_gate.weight": "7e18507989f39b32191133d2657c2ee3b74f42f070579204d727eb72215793d1", + "blk.39.ffn_up.weight": "22cda752269c9757ba918abede1df95bb0f83a5c772dea13c8deea3d5f2723d9", + "output_norm.weight": "2858cf0e39d32caf52b7861378ace076000241e147f10b9eb21d8a5cd149e3cb" +} \ No newline at end of file diff --git a/convert/testdata/gemma-2-2b-it.json b/convert/testdata/gemma-2-2b-it.json new file mode 100644 index 0000000..8f34e66 --- /dev/null +++ b/convert/testdata/gemma-2-2b-it.json @@ -0,0 +1,312 @@ +{ + "general.architecture": "gemma2", + "general.file_type": "1", + "general.quantization_version": "2", + "gemma2.block_count": "26", + "gemma2.context_length": "8192", + "gemma2.embedding_length": "2304", + "gemma2.feed_forward_length": "9216", + "gemma2.attention.head_count": "8", + "gemma2.attention.head_count_kv": "4", + "gemma2.attention.key_length": "256", + "gemma2.attention.value_length": "256", + "gemma2.attention.layer_norm_rms_epsilon": "1e-06", + "tokenizer.ggml.model": "llama", + "tokenizer.ggml.add_bos_token": "true", + "tokenizer.ggml.add_eos_token": "false", + "tokenizer.ggml.bos_token_id": "2", + "tokenizer.ggml.eos_token_id": "1", + "tokenizer.ggml.padding_token_id": "0", + "tokenizer.ggml.unknown_token_id": "3", + "tokenizer.ggml.scores": "0872465d173867d755d3ee728f882b9dc2057a0bfd596fe1e3d131522f1250d8", + "tokenizer.ggml.token_type": "8d40143b3477df77beea4139420335ede458bf5e14102f01b0170197b55da8d8", + "tokenizer.ggml.tokens": "c6e66de1841f04de8b8d236d461ab720a4c9b9b5414dc293a09c6e10eab45fda", + "token_embd.weight": "64a9d30707e659e2e673656d71f5aef7a9fb9fd83bb9a77558dfc5abbe218a05", + "blk.0.attn_k.weight": "d8b4437c5edb3cddf6af9987038e1bb2b191c4f0fce0e160d2abace717f5d5d7", + "blk.0.attn_norm.weight": "1eb73e3f7aa8e502f6ca31cd19efbb8e4fd9a89692e13e48ac8205545a7fa7e8", + "blk.0.attn_output.weight": "39e7b78e57d356a22dd89ce1c4d7163b970712ba756545e1703f97866cd2192e", + "blk.0.attn_q.weight": "795058e23b6109febd9d55c89e1eebe6af0714ec8c56fd86a160876a6135ffe8", + "blk.0.attn_v.weight": "0cd6e583d1887c020472e961bbb113fe5a0d23ae2f1c2c876fc366cdb7692b52", + "blk.0.ffn_down.weight": "51eb4d962189e945a84e94e0dc1aad3f8f90cc1a11e18029670afcd0ea0acb1b", + "blk.0.ffn_gate.weight": "9811a29b8ad48432925897ab21dfcb13c5cbd372aeccbbefca9b7866883b4ce3", + "blk.0.ffn_norm.weight": "92cbf4652ef503c1de5b10f2be00b3fcf00100980cb3baa8f3013a8d8bf3d851", + "blk.0.ffn_up.weight": "af87de21746879483ed1b374cdd76b19ba11ca2b6dbb1beba98efdf3be3e8077", + "blk.0.post_attention_norm.weight": "32e135f1f258ffe407018899e39af1725d59d66d60022b9a21575ba160e0357a", + "blk.0.post_ffw_norm.weight": "ba286f5ac11b07fbc986173708c66f1920427be5a6d108af38fa0a837c1c8eb6", + "blk.1.attn_k.weight": "51584435552051f7fade76beca582b3f7190cf7fc07adcf527c2774d4b1c3901", + "blk.1.attn_norm.weight": "6833104c7fbf35a7e799ae56c262b97fffa14789642aee14381b25acd21ed80a", + "blk.1.attn_output.weight": "14c39481369087bf292ac9a3ab2ef166f9fe376a9f90c246653213ef264febdc", + "blk.1.attn_q.weight": "443f64ae2229f857c69d6bebb7800b685786cb77884c3ae19d4286aeed081325", + "blk.1.attn_v.weight": "0df482de2038f1e4c8a7733ac0ddb69ad90759dab5968b942af0155588de4c4a", + "blk.1.ffn_down.weight": 
"66f30763a8bbbcaea609a0087ed75fadb5e771c06378dd2cea94cf17e492e8cf", + "blk.1.ffn_gate.weight": "a7151bff00a545fa18b2c92dcd2a14572ccf9beb957a6c494f1374e8ebe174c9", + "blk.1.ffn_norm.weight": "e197d71ea11b5276bc0167d2663b88089b3ff42b47ba91e85f6c5d95f6306435", + "blk.1.ffn_up.weight": "57c182e0b14cccd1350d388f0c616991702e74281db54637451b70f4ccc24f9b", + "blk.1.post_attention_norm.weight": "3c56f837168d784c2d8bac247c130bdca6610c095c8da4558c536ccad7605609", + "blk.1.post_ffw_norm.weight": "d2a51d320fd01069dd7ccaa7082f16a7faeb671885607d7900b10a89c354d0fa", + "blk.2.attn_k.weight": "bc103c818192de7ce36caaf89dc117be4df13fb902e0bd9a23c64edace5df9b6", + "blk.2.attn_norm.weight": "0f2503aa126083a5d6ac72481be1ef66c6014705b573682b35bd864e4749a3d5", + "blk.2.attn_output.weight": "05fcd4a1226e482f91803a266f72caca887a93e63c2d2ba5611ab3c68d38743a", + "blk.2.attn_q.weight": "6a10b5c2fd423d1e4c4fd60fa8c154a0159b6b2501ea79cae2ef19f45a674e5e", + "blk.2.attn_v.weight": "3cf891945a1f8ae7cc908a5c6b729ff5b70f4436c5ffdbf245cc0ed4cc19cd1b", + "blk.2.ffn_down.weight": "ea204fd04e0d2fc728a9861a459216bbfec629c152004ba625f52cd8837bd51e", + "blk.2.ffn_gate.weight": "3a3518729f1b8b64a82b8792f33987db5418fdb094be0263c68f146a5c38de54", + "blk.2.ffn_norm.weight": "754ede678b725de41a34b82f0edf7688b5c065be7c0d46df6f7ad9430d986884", + "blk.2.ffn_up.weight": "ffdcb88439f5828ffbd9fc844b03ff91637b790b9838097258cc3ae75935720c", + "blk.2.post_attention_norm.weight": "4b3f53b7ba26e8c36b2dfda3b7e5fc4b1065257cefdea235fc7df9af130ac2fd", + "blk.2.post_ffw_norm.weight": "e550369e26b8485e2b54ad34b34bc98af5494287dcc513c2c39cf1eaa5b89d07", + "blk.3.attn_k.weight": "89f24ea450e37d9e95757651a83205c085d81b354ee9489dd6310a391d8409f3", + "blk.3.attn_norm.weight": "24e2ea662b7cb822b4ca5cd61bc17f2709f406d990ec3b4a0dac1cc112db45cf", + "blk.3.attn_output.weight": "ac4dad69473c6e3fac56669212cadd8c34ecc5973d945972e974d94805334967", + "blk.3.attn_q.weight": "b6a9c9a7d4722b9096631c65de62228dfddca6e26edfe6af7fce01e116ef0f4c", + "blk.3.attn_v.weight": "f272a960a40093942309bc342a379984cbacec2d7bc64428db3f64e6b1887ed4", + "blk.3.ffn_down.weight": "c0188ba50d8228805982029c277fc0e87aa57473b8363037c648f6d006ff828a", + "blk.3.ffn_gate.weight": "a04aec1561ee6c0fbb18c3db49dc62fb533619cf697fd548cbf2279761aaec3b", + "blk.3.ffn_norm.weight": "bc053837d44087ec05eb5d9458357b2a5be787789b19cdbbdc694b57697f99a6", + "blk.3.ffn_up.weight": "b3ce8b274f20796d3b1a7c08ba27a919066f9de89a782faa544c4a8d6bea1382", + "blk.3.post_attention_norm.weight": "9c922dee7a7df5667289e2788e60170238239cee2dfdbbd9e435763f9f416718", + "blk.3.post_ffw_norm.weight": "b682544ac953ad2e0b49027ed8916f2e9d1aba5d1587bb4127ac703570c7a03a", + "blk.4.attn_k.weight": "143b0cbb4b787b95c2b6212374410e32173ccef2adb914908a2f89a7916de512", + "blk.4.attn_norm.weight": "5668f60491b780273745192662d02c9a92a4f692b29d16aa0bbc7413fec4f85b", + "blk.4.attn_output.weight": "b9f2bdb68be1e0cf66dd19f8fa2afb105910ad2ef394864cb32cea8f8944e0d5", + "blk.4.attn_q.weight": "ddcf1343dafbc2dfcd0b8741225af22fe4b54b2becce29240bd01c34265d126c", + "blk.4.attn_v.weight": "6dc7074366e7ed52d9f48c594dcc85bef738e096276cb99d28228c89eecc5b9c", + "blk.4.ffn_down.weight": "30334ffc59ce343cf2a1b973174acb7722823463adc07e19a99bd0f404bc9906", + "blk.4.ffn_gate.weight": "890f7c8af208d63b28db52c4b8c16c2288a382d87ff5a6a6d6b0a5b3bf27e6cd", + "blk.4.ffn_norm.weight": "ff0316cc7847221eb86a90c1ab441d4ee61553d410c66414a7755021b3b12448", + "blk.4.ffn_up.weight": "6af97d113f91564c636734f215e25ee602d48eb045458f300b3ec7582be0f41d", + 
"blk.4.post_attention_norm.weight": "69438f231e105e68216b078bdeb35a7cdc8b12c4e2845e18ecf4c8d361d6a321", + "blk.4.post_ffw_norm.weight": "0fd535da78bcf2b32c95b05b2b83dc49817393765be90d8cc1ed3d56f47b68ec", + "blk.5.attn_k.weight": "0166eb3c6d20dcf3d3c169e94caa8dee057535bb525e29f698fb6f8844f18a6c", + "blk.5.attn_norm.weight": "a7808f27f164023d5cde2be00fc23cac6c71aa0ddeb60bc23e12411b80087672", + "blk.5.attn_output.weight": "8b65b2027a0842b68c5308f91d6a31de9599d794157d77df8418b19f9e0d9334", + "blk.5.attn_q.weight": "966bc626ef2c2394d872087a41c126bb1b67d1d5f6de920204ef5e5b16c34003", + "blk.5.attn_v.weight": "9a362aef3f4437fbf0ef6e1ba785f3329c3db2960f93fe36547d2795e9c254ea", + "blk.5.ffn_down.weight": "63e53541d34197720c06f297aa8142ac6b6eec002c7987b296f26e8b1400f931", + "blk.5.ffn_gate.weight": "d9591fdd32f783e0fc26e20d5d587ee8971ac8ae2e4c818c6eac1c125c7c7f37", + "blk.5.ffn_norm.weight": "677334cc60ecce3a7f4ab3acda15d359353d7358872f614ad8914e3780e9fc6e", + "blk.5.ffn_up.weight": "a63764110e1c655ffbd55af0669b2dfe4cc29d0e198d33a8e5426461b08a85f7", + "blk.5.post_attention_norm.weight": "c55499f859b2c0a7f5cabceaae47309a5ad38bc29d0f4a8db81f1357023162a9", + "blk.5.post_ffw_norm.weight": "82752754665f842418f3e302cb5f43d1e0504dcd124c4b8ddb77018b2c793837", + "blk.6.attn_k.weight": "e20a5f0d6c807273c8d491439566b428497ac02097cf0aa55e33748c28e14be6", + "blk.6.attn_norm.weight": "2c6ba42fd3c73d72073ced03a32dd28d70a89ed9bbbc8fea1ba03a7ade951e6c", + "blk.6.attn_output.weight": "4de7c5c2f4a133a266e17ed8c14c52959466b54cc7ab9e19f789a33b4850f284", + "blk.6.attn_q.weight": "56462d921800e6b8cd2213fef04c4ff16d728905cb2f4c58e966d0a053a3b0ae", + "blk.6.attn_v.weight": "b758dcbff769d6240c2245ede1dbc62c4170a67c77458e866312589220fe29af", + "blk.6.ffn_down.weight": "582247fb3c2bf687cbe9413fe18d18ad47bef4b65df7d78905e10335c6134764", + "blk.6.ffn_gate.weight": "3035444d5286aefb7a6d04e55bc27e1fac7cf895cd5be02319a431b8e047b4ae", + "blk.6.ffn_norm.weight": "e582d24c66e01b96faa20ce6adfda3d8583b11e809bff89969927398175e369a", + "blk.6.ffn_up.weight": "6f4b7bbfedeacf61a4866ae0616c4ba6c9e856662e8f00ae6aaec7f52c53e7b4", + "blk.6.post_attention_norm.weight": "8fe51b50bd677d21586aecab0b565c4bf9fa68ad50bfe366f45e8fea3c657ca8", + "blk.6.post_ffw_norm.weight": "81ba3cb4c2bf5c546b86855b7a885d3fafededc67eb3a35cd3598b03c9e26e65", + "blk.7.attn_k.weight": "2e044179cdcae0946708c86bfea7aa0391e1f7e2a09b33fca035d384cc3ca758", + "blk.7.attn_norm.weight": "94b48c546b046803c60e75a3acb17a356b710735989938021b565f68df9b4985", + "blk.7.attn_output.weight": "65709b4ad7a581f4d75793d39d4032a359f6bcc0c3835205242a0b99e5b66824", + "blk.7.attn_q.weight": "8ded993c95d1f7caf201ceb6fa035cd6ed6d351b50b999fa9355dfee9486cb5b", + "blk.7.attn_v.weight": "c92d5e2d2d48397542bc03bea25bf39154075e66c5bb1ead85188505aa04ae91", + "blk.7.ffn_down.weight": "e8ba8fb57208805ef1dc23cd7c86e9a2d1fb7c52c3940d292cd5bb2eb24b3fac", + "blk.7.ffn_gate.weight": "f0f06d6a2e06c5ac252083bc61d05c814e6289d3f4e4a87d2f06918254c02c36", + "blk.7.ffn_norm.weight": "ebf8ef775f72624148e09d68a4332187a7a5020c521fe0623da1cd3485ad33e0", + "blk.7.ffn_up.weight": "a554adc4fc7122c247c77670e169916ba1794c787b5be30a2b36705138f1f746", + "blk.7.post_attention_norm.weight": "3aa6bc21d85c3a0c12b964e82b12feaedfdd13130c3cd2229228e24e0967ebdf", + "blk.7.post_ffw_norm.weight": "508bc7b19ee8ff08f0007c890133a462fc57c7e72b16ee8f6dd64def264ef876", + "blk.8.attn_k.weight": "363c8e74056642fe9e7c2f3f9769d57319cd3fa0a6022810189ab8d894322885", + "blk.8.attn_norm.weight": 
"685b49a1f1acb169f4df0bdd8e3de6943f3033cebad14b898a72000595610d92", + "blk.8.attn_output.weight": "7bde571e4efef1c6a6143f0526721dfb59e0a0ea0e1a3616a322b2eb937efa48", + "blk.8.attn_q.weight": "fc993dbc1074c28a0e1d85e5ab2f4ea6a9c6c1affe7ee56027000a275daed9b6", + "blk.8.attn_v.weight": "281e8791d3aef9b3864f1cb054da0ae0c2fef4ce0a58b1bad8bc136b2fa0f62b", + "blk.8.ffn_down.weight": "b1164a2578a7f87ed99c2bbc76c5dfbbbc6a1a803605391acc3f320fc989ffd7", + "blk.8.ffn_gate.weight": "6b39a3b3aaaa79aee61416b54d62160b9258042650e61c6b47bc77c2dd17daf3", + "blk.8.ffn_norm.weight": "17ea1362c72da27f12bc936500492035bdef3fd8f940cb12b57f37d42ba8ecb1", + "blk.8.ffn_up.weight": "bc3a7c47afc440d2bdf8fbe9ddf2c9220467472c60c8b4ded8c0f181470ec96c", + "blk.8.post_attention_norm.weight": "5c506204e00411ef9c8b4134d40eedcc19fffe68dd0af7d7cc49dcabf2dfac7e", + "blk.8.post_ffw_norm.weight": "002faec235c3678864e2901eed275ce4e9dc229164a91c9cd4c965142ba62305", + "blk.9.attn_k.weight": "0bab39d8c237f1b6d0010db40467142625a9e6f2e0e4c49a56c12b41e4e0b1fa", + "blk.9.attn_norm.weight": "de5f38e873b17f07aa7598831b89cc1cae2c9bc3eb2e042ee9af059d2563e84e", + "blk.9.attn_output.weight": "8a8184702c25a62df9ff309c0c7badc8587208523b2be3e8fa90ce7080573e6f", + "blk.9.attn_q.weight": "7c961b2431b09ddf95377acd07201cb91bf13d9cd3ae0f2c25c7d6a0358d9f50", + "blk.9.attn_v.weight": "e22d240cb4743067033e659cbf210ebe2ebbab3e1dea6ccbe5eaa982382ca038", + "blk.9.ffn_down.weight": "a426f81210f03d6ad53277416e1fdcdf37d8065e4817613edaf6c67a343426be", + "blk.9.ffn_gate.weight": "a82eba825cb77b8e64f85ff99ede2fc71bc9b01751eeb17e9e6c246ee12ea62e", + "blk.9.ffn_norm.weight": "1a97f9b1302a3a326d534c5c3fed2db6db0ae45fd0edd381a3e4fc1c75d81030", + "blk.9.ffn_up.weight": "5f20bac2bbf03bb42adb92fbf99561651e1edda57e0b61935ac7f6c08c0ed7cb", + "blk.9.post_attention_norm.weight": "9f9866d13988e1946b1e1c80d9374a92a6e3be33748f8eaed3e126d1e1a4c796", + "blk.9.post_ffw_norm.weight": "a6896dbf698db4dbbe5dbf12417d4fd80e9cad0c539c858892ec0aa5b046bb58", + "blk.10.attn_k.weight": "ca8446e5d21ecd4e6a70dca8d321be480be4fba94d70cba065205436feb44270", + "blk.10.attn_norm.weight": "4f41fe290e8f21f63b82151b6cce94bf7318d121468816b0c58af0ff7c1658ab", + "blk.10.attn_output.weight": "c626d2e9681c5c941bbde43dddfae1a8d4986bf2be4470857bc8e8bd7f869044", + "blk.10.attn_q.weight": "1e61b210a13a429977325cf15d781ab77d604cfa862f4270329cbd94237d5835", + "blk.10.attn_v.weight": "8ff8d3e3f058ec3b35ada1057f2ed59c06494d0e0be6a8dc3ff9edf9f0e1a115", + "blk.10.ffn_down.weight": "bcebc04219f8081a5f483e58103c0ddbbbc631a0a54fd6dd9d55778e041f70ee", + "blk.10.ffn_gate.weight": "7a23a1e620ef871384ddf9611ccdcfb893fbf013cc203ac8e72f745420f1eea0", + "blk.10.ffn_norm.weight": "e3a375e43c349a1c6c66c22328e513cc1af3137fe839e43dc8e9be2f65914fd7", + "blk.10.ffn_up.weight": "5d182e7c94369194fca5f19cbbe668a999911e57f3d363bc7fb6088428700cb9", + "blk.10.post_attention_norm.weight": "b841c6308296e8984f3c5f549c6e3a242f4b3e19141e1f54cc08de9c46759c09", + "blk.10.post_ffw_norm.weight": "9d66fa05b5c940208f634f5053d809094c99a2a10a1d1e8847c8281fbd99fb49", + "blk.11.attn_k.weight": "14adf24ebb2bb17b336ca81cec3e690fd854782f4440ca6c66cc1d7e7bf1c850", + "blk.11.attn_norm.weight": "2d2213f311f50414702b5b34f22aafb9d9a0b6787243e7578562583dc40ad195", + "blk.11.attn_output.weight": "de1f14cc2a7fff00cf11b229f0576999205f17b9536e97abc9d6de3cc79a7884", + "blk.11.attn_q.weight": "2bcc5c147524003109ece0be08b89ac8b25baa71416ffa76573c6c052ffc6eea", + "blk.11.attn_v.weight": "2e6ab8573070c22dc1e0d7aebe4d52123226dacf7822dcce06fadbb38fb036a4", + 
"blk.11.ffn_down.weight": "1b86902f4e36868421e5228b9445051f8290b292df22a6d1af836dcecc1f25c3", + "blk.11.ffn_gate.weight": "e756e8081bd0a16aea4a9ef5076ad102113524f7a3d50a3a77aaa7f7938b63e8", + "blk.11.ffn_norm.weight": "6913887267be227cf9d1991a3dd8db2e7e74bb9b5fbdfcb9ac954fd7d7b95b3b", + "blk.11.ffn_up.weight": "619a3ac0609ebdf42c3fb2b6e4b1db48df79e6dd8418d7ab8f1bbff13d8a6a50", + "blk.11.post_attention_norm.weight": "e4b4ba92cef7b6a78407e8ab1b0307d47dac6c3df7b6817e28038317ff662d7e", + "blk.11.post_ffw_norm.weight": "40aceeec58cb855f0c158c9cc217168fcd5d0e735567d587217b1d78df17bc5f", + "blk.12.attn_k.weight": "c54c5a4d4892522022d1aa2204cfc624f0b4042caa536e678967316293fe5cb1", + "blk.12.attn_norm.weight": "7cd2ef58298569ffdf244d9b390f3917245276c8206e5780af5f96d8c0bbb446", + "blk.12.attn_output.weight": "85495ef9cc8b3deb21f741bde463ff6493acae2be51f02ecdeef952cbdec3375", + "blk.12.attn_q.weight": "d19383f83fd119bfb8c0280c9515705c11d8e7d502019fcf8f49efeef0d106d0", + "blk.12.attn_v.weight": "869ac669ba49531d9128892a0e27cef15de508ff40cdf80cc1681dde50d09204", + "blk.12.ffn_down.weight": "578f39f8f9fc2f09138afc884a952d7cc3a9a31de4216acd10e88e19e0b75f8c", + "blk.12.ffn_gate.weight": "e29a0186bc6c4a0720246306e922d3a83f777dadcf4ac80bad468287031cc8b5", + "blk.12.ffn_norm.weight": "e1ee95c6584b5cb57fcf1db8ce2bcc03aff91eb389238c094a61c00dde93d1f2", + "blk.12.ffn_up.weight": "2a826f06d7cdfb3edc6ae250ff44363ef77a2a9cdf96313e23a331b99ebfa17d", + "blk.12.post_attention_norm.weight": "4bafc7699b948d5cbc0d3e09b418b06c6abc4651a61ada9609d9a2f21c7e5607", + "blk.12.post_ffw_norm.weight": "bbb8c34a7176bb1a49f9fe2bacca0bd26b673d52c0835b2e90fa11f2962f077f", + "blk.13.attn_k.weight": "ffeefccfe8255d1b694382012ff4134eee5fec9d9491c8d0ff0a13832d1a37e8", + "blk.13.attn_norm.weight": "35713726529e3887c4135a88e86e8a4d7270ba5b9f2d1ab462622fbf40a7cdce", + "blk.13.attn_output.weight": "0d60b7c5cd71190a9ef4b873b0f516be15447c32d83914db2794b14592b0b460", + "blk.13.attn_q.weight": "8296069e65bef794cefc61257fc65789b3cb22955e30f3df129205e5041b2222", + "blk.13.attn_v.weight": "ca0f4ab9d16a748fc643a5c0c7a19826a811bf2a4e7316a8c935d4bf0ce8abc6", + "blk.13.ffn_down.weight": "d5514e0c8e7b3ed1cbcc1605eb5be1733b6ab3514cf8a0508fc72f7d05ed8bcb", + "blk.13.ffn_gate.weight": "8108e517a82e08a3aefbbd267bfa50a1668f92a76273280ce8a6bc1f6dd61521", + "blk.13.ffn_norm.weight": "5fcb6132d2134bf1f835b904a99820fa501dbc57d2224129f7098bf3cabc1d36", + "blk.13.ffn_up.weight": "6d744b7cd390a3cae3aa350dd379b81246acd056a2259996b6aaadece8465ccc", + "blk.13.post_attention_norm.weight": "e08b14698912509790e9575b8676971fbb0a4d82d719367e3756c0d0c4ab8cc0", + "blk.13.post_ffw_norm.weight": "2b196e4450fc5f1e7367b2cf7fe33a15fe919fbcdd861d11002346f16e980535", + "blk.14.attn_k.weight": "120e5f48d7268dfd9ab5f4bc9cc57a7cec63ea9635f56b80d435eb22936e9483", + "blk.14.attn_norm.weight": "146367bcce4db72cc894419a2e0145a6f533507dd68e4739c10ee480308c401f", + "blk.14.attn_output.weight": "720fa0165e756876c5cb6ad9e2780dd910390933f3f8849e5add5da04266650b", + "blk.14.attn_q.weight": "f5183466f56219ca1aca52d8b82c2d966a4198fea40fdd6b39f4d8b06ca2a6dd", + "blk.14.attn_v.weight": "24f8ea3d5512cd37c43c8329cb0da0c90d1895aef763ac2dcee3fe5157ec50a2", + "blk.14.ffn_down.weight": "e29960965b384ae5ab3d898a4dbaa8fddd28fa0e477ac28bcac49dec12a5ac67", + "blk.14.ffn_gate.weight": "6d0d6a74bfe9692e8f8eedff0fc34fc4fa1c8687794f35f2e2b033ab2d7510b8", + "blk.14.ffn_norm.weight": "f7036c1a9a71e046c9d2af16e9218fda5dbb0f7241ab44747abed1f0f9d602ca", + "blk.14.ffn_up.weight": 
"7d69ea1424007ffc9c12247dd0308c616e93ac02a59ec341cfa48f92d6ce3b10", + "blk.14.post_attention_norm.weight": "65b9712834d9445d4236bec362f3fb795c20d60c541b3dc6dbb7914d9b493e41", + "blk.14.post_ffw_norm.weight": "9c6a8da2e4e437d5cfdf3b9097e9f8b64bf07946a048badec20f4d374613f38f", + "blk.15.attn_k.weight": "864bc618303a0e4ee67fb1d5e751de61e936cd51e96669dd86f8cd08f2305045", + "blk.15.attn_norm.weight": "f9f4187da6eeadc2fc5921d8fe669741697d16c13d71e4aaeb73b82f50dc577e", + "blk.15.attn_output.weight": "ce2419a0b097036b2a31f2f4ad731d5814bcc2ef4c511786e24471e5eefd273b", + "blk.15.attn_q.weight": "9539db5a970d11ebe99722d1e13fcd635e250033630811efe583d2f97778e4a9", + "blk.15.attn_v.weight": "1c834b48ccd88adaeabb7d8bcb6be0bcd6d5ac1354ce88fc28f19a1a96b81ab3", + "blk.15.ffn_down.weight": "bc1f97a65dde6fa2c1e5397afb612266944b343f2eaa868b635ddd25829f8a42", + "blk.15.ffn_gate.weight": "1b14529d57056b79037f6cb5008132e62cc35992353b38dda59572274623103b", + "blk.15.ffn_norm.weight": "9af77458de9ee55c66f93865759f9c2c398557f94f3fa8fa6af30543d7339cde", + "blk.15.ffn_up.weight": "41d524a26b61a9595816b4fd53cf57ef50a702e4ef32933ff6136dca9136a267", + "blk.15.post_attention_norm.weight": "c60a03cd0e63a7db5c80015e58e9b97ba2208caa19f66a6fef5c4447eca900ce", + "blk.15.post_ffw_norm.weight": "34f7f9f96769215bbc3d17084df091864aef96a6645b7d0b3b7d9bd92f1a4b0b", + "blk.16.attn_k.weight": "7e27240d9f3a8c6cf0f4a980113d43234f514eadc3e3e1792b86efb29ffb1a6d", + "blk.16.attn_norm.weight": "af798acc0899282a30448edec48223b3e8efda177090273e612d8eca5e377301", + "blk.16.attn_output.weight": "79df39a3709d3d53e84146291e0944a7a653d06705293d9ccb5648dceadb432c", + "blk.16.attn_q.weight": "db58a1c3b83ad294804e5fd7321005719e200659173466df5a52a182b80b7165", + "blk.16.attn_v.weight": "2af6d48cbaeb225b5c1a704f76abd89c8ab1521417695b112b4dcc2cbd39b74d", + "blk.16.ffn_down.weight": "fc1c813eb5e7da3d6194569d6cb21602fc6eff2dc8e1b0eb753f2d5df148189c", + "blk.16.ffn_gate.weight": "7a80bcbc42464bd55df4814a6edbd7b5c153e0428323bbe49de55e2d2add33e7", + "blk.16.ffn_norm.weight": "2041685ee926d30f3f2ae4ec35b5688f1cd834167a6359a7d4057eac804c58b2", + "blk.16.ffn_up.weight": "8da4b718973ac1d43b928829bc45e062fd101984d6c98dd825bd7c5d08ebfbe3", + "blk.16.post_attention_norm.weight": "975c48fe680a6167438a106140a8872eee7765191f152d80e3b8ddf47693e095", + "blk.16.post_ffw_norm.weight": "4de2d4d483acfe4fc77860ea929025df2f4e15c10729413f36a18c94eaa6d689", + "blk.17.attn_k.weight": "f937e61f0af8c4cd98ee742648eb60e02e579683e21d421071295a3b70aebaad", + "blk.17.attn_norm.weight": "c3270583ed28b7e423f5b170c59113234f258169b93a867d9274f4c10b7cb115", + "blk.17.attn_output.weight": "b8c1150e81e685e539a5dcf2c19047a24eba2b281fabe166674b1d71ef4612ea", + "blk.17.attn_q.weight": "c255100ae2011e7dc7e3bf3bc3ccd96d859fbb98581cae993d7b82c1ba8e8b39", + "blk.17.attn_v.weight": "5830bb0a555984c6485348067f70b5d22ae337c011aa9248dac2ff4c95944551", + "blk.17.ffn_down.weight": "8ff9a7cccaa3776434a9d895aae4fb5c36c736bf2ec98784226b4c234940fbb0", + "blk.17.ffn_gate.weight": "1b52876739712831c272911533da206f407b46034a1a4ae8a88c1f96b6bd5747", + "blk.17.ffn_norm.weight": "d0e16ba5e87c91b545334e022058c7d03849665c3b1a6298771b656531366b66", + "blk.17.ffn_up.weight": "4dd6211d01dbebbe21052708eddc242b082a58b5f18ed16479e17987c1d3432e", + "blk.17.post_attention_norm.weight": "6f49c775c7417dade77ba8268a0f8441c1e5ec28b5d7e4dc5ed07a04d04600c8", + "blk.17.post_ffw_norm.weight": "b91a0bb2e6679e9c9be06ad323adae441d00a3d673efb19d7c4954be2aa84b27", + "blk.18.attn_k.weight": 
"22b565ace1b4da8b33865a58625be1d90beea9891f29686a69fa9cf7c93217db", + "blk.18.attn_norm.weight": "3e0160d7063c8753de65d2356a66648e47d921efdc5c917efb8209892120f8db", + "blk.18.attn_output.weight": "e3180f0bb4ca90b31e9b08158db38e332de62dfbaefe34aa94cc316409331e09", + "blk.18.attn_q.weight": "f3a5a83614c3ba7ea41cdd5b1b0819a241ee2a951a381ce4a9e001d3f700ed8f", + "blk.18.attn_v.weight": "f3350a5984fb951fc738adcf78147e6d812ff1c576670c460cafc99c253c1654", + "blk.18.ffn_down.weight": "9e9d09b13a33525e14bdaee6efc65c551ac7cf7680e534b940ab122a3a7c1ac9", + "blk.18.ffn_gate.weight": "ebaec8b4b578a2e8d815baac12f1675c208f80c68074d5a18288a2e1a60680ee", + "blk.18.ffn_norm.weight": "33e7687c53a242f2f8dc7093a491c97b18d4a5a8c14d183f02bd586a770f05aa", + "blk.18.ffn_up.weight": "78a1816662378ce56cc870e705174492781897b3afd2d4d97a51f10f2f2987c1", + "blk.18.post_attention_norm.weight": "a58dde3f12df3e94cbc27d87c8ea86f89af8a388a506446ff6758f05399b05fc", + "blk.18.post_ffw_norm.weight": "cebf90cc143577d483cca27b032dfd82031ee59bdf17c0e2cf60a0a3ad5bf996", + "blk.19.attn_k.weight": "4683375d0599ac9e2232196aae1e90af13a14cae26e865465de5c8e257bb2055", + "blk.19.attn_norm.weight": "f3eba936bfb1814bbcb0a1d62739eb66daac839df8c9c836fe0e94860df88525", + "blk.19.attn_output.weight": "51c0f01d38a9dcfe9bdbc4643576fab164c1d9e4b7168b7695c0ee55e6965667", + "blk.19.attn_q.weight": "28d15b69b8416f2e7ddc88fe381cb1e2ef2ad705fb1c268139ba96498cc74848", + "blk.19.attn_v.weight": "6860f1cd720638e63a981fa2c0b4db900129826bcb9823c9ddf9fb8b1b9f3383", + "blk.19.ffn_down.weight": "bc7f2d7827ee01c2dd41401c7b3b1700ad3a4ff620e8bb734f92630d342dcc7f", + "blk.19.ffn_gate.weight": "54d03ef69ba373fc410fbca8f1e34a565d58e4296d9a035ff7e48340b9c848e7", + "blk.19.ffn_norm.weight": "9178fc796a340ee6e8128ca74c0cb6203d1adbed6927af4e5ac7863da57affc7", + "blk.19.ffn_up.weight": "a77bd708026c6e83ad5c79c223278e74621bcf74a9641c7818d96b595daaad20", + "blk.19.post_attention_norm.weight": "ae94aa26f4c411bf9496a6fd4a6df64ee589ee1ae9a04b531d45acc95721e582", + "blk.19.post_ffw_norm.weight": "9ad210700edeef12133bdcff04bf1c7f62b49f6f4a9ba483c7cdc59857c24a5c", + "blk.20.attn_k.weight": "e35bce1e9f4a7a09ef34721f57ea38cfca68c272f52d923fe50af8308f66cfaa", + "blk.20.attn_norm.weight": "644800f6926fd34f233795c4dec1151a295d2138ca8cac33e3e48167d26f8b41", + "blk.20.attn_output.weight": "8d3758cd236471741e1ad66c0710cb79077dc8c7a3a292d35bc551c0c5abe627", + "blk.20.attn_q.weight": "c333b1f0f6f956b5d73891df10b1a0321e55fc31c40d623a24e1f52caa6a998b", + "blk.20.attn_v.weight": "8562b418d0c4868a050fb19fa3fcaf50a8cf1c669f537d666c80c7b3a04714e1", + "blk.20.ffn_down.weight": "97efb608ac44cc804198faec3ee66eafe56ced6b7ca5359700c6f1df75b7205e", + "blk.20.ffn_gate.weight": "5c61151d86f28415c73c73d90ec088c646cbe5c1640197caf58eb501ba7db293", + "blk.20.ffn_norm.weight": "24bbe0a701afd4bbeea65b3edde712b3cbb2281043bbc43dbf250582453116ed", + "blk.20.ffn_up.weight": "e170cf68e249566aa99eb6f6b265679bf9a5a6b76830ba24e7e130c2515910c4", + "blk.20.post_attention_norm.weight": "e092d751cfe20dbf2d348358f3b38397bd83e4ed94d6bbaa6bbaddcd902b2ac4", + "blk.20.post_ffw_norm.weight": "219a18a47dcba76e669e4322223a5a9227bd3db1de3fbd3d3cfb22e54a783c5a", + "blk.21.attn_k.weight": "c3a095ebddb42c63824f1c98da65263dc88e4d790a26aa1632840b44f5cc7cb1", + "blk.21.attn_norm.weight": "ef8bbaded5fbc45ad9cf3985ae02174524e7090fe6362811124f942ef643bec7", + "blk.21.attn_output.weight": "668f018aba72baac6252aa3ad58569ddd55ab751a0dd8d7bcc9fb9b6efb4bf53", + "blk.21.attn_q.weight": 
"e759c65663089f3bbbd51847934c185e680c82f1249065d5d487da638e519e6d", + "blk.21.attn_v.weight": "2ff57762686cf9ba1f5a6be76503454b97556ce67f4ac98254bd0562231197ba", + "blk.21.ffn_down.weight": "3fd106556fb721b1c28ae3f4026bc83eb1b08ed910f2ba5f466c6b5f327d91cb", + "blk.21.ffn_gate.weight": "338022d882f4b6619e8054a6fb909696fa3eef3013cf69b65c3cacdfc5b9e42c", + "blk.21.ffn_norm.weight": "1e77660c23a3f9653ee721a863d1960f773d87437cabc4dc0a6e17ee3d4e5e44", + "blk.21.ffn_up.weight": "7d31b20fbc2e6eba8f350f170069dc36f0cb12f68fbc4206ec5022a74085ebcb", + "blk.21.post_attention_norm.weight": "9638bae8d8bdcd7ed68da282979cd84a07c41ff9cabcaea94ebc846a1803db23", + "blk.21.post_ffw_norm.weight": "d622ef11115fe0cbe04b727d5a3b6371e7f39bf08c8d5eb9bc6da52e3f3cfb9d", + "blk.22.attn_k.weight": "5c321cb29deffbe57de200dd206a62005f1e80acb86c4fd2349dd44c8d3594fd", + "blk.22.attn_norm.weight": "198d949705d7170a331d75889d8c7500c3635254dac2cc6aa4dc35d556584536", + "blk.22.attn_output.weight": "19805cd5d7025b457e5d41d70db8b3fd63c2dd0e4a94d3ef1704d50ef4e749e8", + "blk.22.attn_q.weight": "177836cd583fc87405975ddc21ebfebdaa090a0363799664c72caa3da851ae2c", + "blk.22.attn_v.weight": "fea255692483e30d0108f9e4e250eb3ed7dbda8d83f499b06519b8c223ae6096", + "blk.22.ffn_down.weight": "00cb8939f03e5817d6d412de8cf2c923c9568d5493e382cec7faf5718fb034eb", + "blk.22.ffn_gate.weight": "b0591065b91281b2fbd8a9567f3568d40479f680e1f0a29e27ae213f37642489", + "blk.22.ffn_norm.weight": "96b5c5d0737c2ceb8fc869f54adb9e5f46e28cb7b177c40f49fa926b923c00f8", + "blk.22.ffn_up.weight": "81f472185b24344ab0594ea8246cc6e200e0dc1cab4943e74fbe4ca19d5a9701", + "blk.22.post_attention_norm.weight": "27fa9aa6260aa3071e0391e1a1d49322dcb6e8072315b8a9b7064087108dbd06", + "blk.22.post_ffw_norm.weight": "f37e1dcd7f643d9545675ffe9dc527a11eba86eb204989c2f44f636b266d896a", + "blk.23.attn_k.weight": "5d82f36658a56c3f94d0bb2d61f65509c966fa6568f81812e0d3e338b380ef8c", + "blk.23.attn_norm.weight": "b7983f88d9cad88bc88a528923e6da592ad20e699965b223ebc10840fe1f4fec", + "blk.23.attn_output.weight": "59f97f80f430d71606aab0158a195aed29ccd3405e6c0a5c41c809be8eb01898", + "blk.23.attn_q.weight": "53ac4789fe958919cc02ea4222bcd64c0ea1b4baa54304bff46635bdf42f7490", + "blk.23.attn_v.weight": "ec8abe09b9e84dbb52c7a068094657c6d3c62fe551ba8d7c3a3f23da622e9756", + "blk.23.ffn_down.weight": "3cf547eccb1b82aa64f208cee9682d7f558ca84e0aead7d9d3d1420d90f3d992", + "blk.23.ffn_gate.weight": "366aa2486d911ba81eb519119e13807deacf7e9908bc1975a2a63e00d6b10124", + "blk.23.ffn_norm.weight": "6d1d4a4af34bb7dc090ac87d6457d398c3e0fb68bd2e2b60b099dc318b6cfac3", + "blk.23.ffn_up.weight": "53f76692e253f5d2420b3f200c731b9f3b7a83e379920b4a067c729b4674aa4d", + "blk.23.post_attention_norm.weight": "7c952fa0efa76b3f048c8c4c9e8dcb5e3724d231327eda6423a34d3f3d3367de", + "blk.23.post_ffw_norm.weight": "7ab188cfe61f0a91b40309a0ab6bfa99f19d0ff2a37b6ac10e5f0c7f44eb5270", + "blk.24.attn_k.weight": "225798792f9bfdd10eff0505ebe61e0aad0209c17b431f6044ee7968ffe8c198", + "blk.24.attn_norm.weight": "635e3c1ebf5219bbebfc40ef164bc32d2b726ef595a94da64ac524ae878e2915", + "blk.24.attn_output.weight": "482f5bb2db8d9ed22b253d9a3296333b239efe698e5992e5d77e7e12dc2a5cf5", + "blk.24.attn_q.weight": "43805bbccddb65d58fffc4be9b5c374d4e1df1395ec1e1ffb4bcff03e98d5adb", + "blk.24.attn_v.weight": "fa741af54b4a3b1775d32f59134756090c5df2e7345a12a2d8db94fe289667a7", + "blk.24.ffn_down.weight": "83c6351e3162626b276f524a57836144625c2556dbe321b57cbd8fd486a68fab", + "blk.24.ffn_gate.weight": 
"fbe66be0d84d12cea5176cc7eaef64382ffc7324cd9d6266a3342dc43442f2ac", + "blk.24.ffn_norm.weight": "77c1445a8639ad24938bdf0280233eea2362d47391421833dfa72ec756dfc1e8", + "blk.24.ffn_up.weight": "78235ac729ee23c1cf1ae543751e3af32776d8808cee6e529c2a625a1f027654", + "blk.24.post_attention_norm.weight": "161f71b6d07628d43e4ae51a4c9088ec6ca2db123a17986a14505d83fdd04dad", + "blk.24.post_ffw_norm.weight": "cf1ba692aa683368b02ac413e69b2521b98c69a5274eacbb54165b53bf38a8b2", + "blk.25.attn_k.weight": "057a56bd8c8d2b41608d1f71faa3052902152ddf85e47669ad950c1c3e77c33f", + "blk.25.attn_norm.weight": "b7179fe02c334da556ddcf6c1b502245639a728c4cbba8b552d8e1df4565ee9d", + "blk.25.attn_output.weight": "4fed8b05b08a0ff75ffd022701bbeb52f17b23d09332a1ddcba737244bd0d3b0", + "blk.25.attn_q.weight": "c52e99f5d38bf7538d6106a0bbf38ac6dc6296bca9a3f849afa384ea67b4af01", + "blk.25.attn_v.weight": "c49c23d8e1cfa6a8eb971eb69942204890c6d7d830dc8774c84b108a80598912", + "blk.25.ffn_down.weight": "c08d4dc8412b19fdc870c164b83c341b236ec6fe7bb4a9bcfe0dc100faa20286", + "blk.25.ffn_gate.weight": "1a4cb3f36735d59181721471452807903006539e5e1b5ceb4f72d1d7ae134127", + "blk.25.ffn_norm.weight": "8fd6bd0dcec5198761525a36992a57c9ec5e9da60a22092839a84ae8c4e87f26", + "blk.25.ffn_up.weight": "3a00f39bdd5f31dc5e3b281d2002e1ac4f2475d49a0ac1d7720a25b377dcd04a", + "blk.25.post_attention_norm.weight": "e5f31a648612c859b6d21c9ee426e87a86cb1973dfdd86276c767371d9cef5ad", + "blk.25.post_ffw_norm.weight": "553c3bd774922c99c2384380a142d019881d30dbf0fe3bf9430dabfb3f6cbd33", + "output_norm.weight": "49445c4585ab0a8135717a0bdb1cda4a062a030177d0119561d91542aec5744b" +} diff --git a/convert/testdata/gemma-2-9b-it.json b/convert/testdata/gemma-2-9b-it.json new file mode 100644 index 0000000..90cdbee --- /dev/null +++ b/convert/testdata/gemma-2-9b-it.json @@ -0,0 +1,6 @@ +{ + "general.architecture": "gemma2", + "gemma2.attention.sliding_window": "4096", + "gemma2.attn_logit_softcapping": "50", + "gemma2.final_logit_softcapping": "30" +} diff --git a/convert/testdata/gemma-2b-it.json b/convert/testdata/gemma-2b-it.json new file mode 100644 index 0000000..0482f1e --- /dev/null +++ b/convert/testdata/gemma-2b-it.json @@ -0,0 +1,188 @@ +{ + "general.architecture": "gemma", + "general.file_type": "1", + "general.quantization_version": "2", + "gemma.block_count": "18", + "gemma.context_length": "8192", + "gemma.embedding_length": "2048", + "gemma.feed_forward_length": "16384", + "gemma.attention.head_count": "8", + "gemma.attention.head_count_kv": "1", + "gemma.attention.key_length": "256", + "gemma.attention.value_length": "256", + "gemma.attention.layer_norm_rms_epsilon": "1e-06", + "tokenizer.ggml.model": "llama", + "tokenizer.ggml.add_bos_token": "true", + "tokenizer.ggml.add_eos_token": "false", + "tokenizer.ggml.bos_token_id": "2", + "tokenizer.ggml.eos_token_id": "1", + "tokenizer.ggml.padding_token_id": "0", + "tokenizer.ggml.unknown_token_id": "3", + "tokenizer.ggml.scores": "0872465d173867d755d3ee728f882b9dc2057a0bfd596fe1e3d131522f1250d8", + "tokenizer.ggml.token_type": "485e40bf3d715a4764818fc097d6a2a41db872d82ee714bc500872a3437ff48d", + "tokenizer.ggml.tokens": "c6e66de1841f04de8b8d236d461ab720a4c9b9b5414dc293a09c6e10eab45fda", + "token_embd.weight": "17b87ab2c01c80657855a5413d0457b4a041afaeda0cc785080e44e2f04acf07", + "blk.0.attn_k.weight": "28ac0da05754ad2714ae95da28a5ad191192140b30b8fd22d108d4700c9d989f", + "blk.0.attn_norm.weight": "3f9d5675d1ab0eb8a816719dac9fab81f2e95c52be02c34263339acbc087febb", + "blk.0.attn_output.weight": 
"703295c2c63990ff896778685c678f145298886f680f3ed5dc2a7ad54c293265", + "blk.0.attn_q.weight": "69c2d0e4870e9d722a190d356203c9605575a16863466c3d1747966ef1cf5791", + "blk.0.attn_v.weight": "95219c9c07b5ffe9a9a01e456d845eef2b11f4fc12c93dbbba479db395444c13", + "blk.0.ffn_down.weight": "a2feb5eb3d572c57c5bafbf0ab506862df1160fe40965dcfe4b9fd855c08bed7", + "blk.0.ffn_gate.weight": "fcca072c445c31f4dc4d5dfaa785b1bdf7271342442099b74fd17268b5829fbf", + "blk.0.ffn_norm.weight": "7621f95dbd245cade6fffd6b08797d69d8e3954e960f0b5551b90d967ab95448", + "blk.0.ffn_up.weight": "14a9bcdd451403c67136391e1b6e53b3b1830f00199bd911dbcc56d8749c14f4", + "blk.1.attn_k.weight": "c70f73c5df20579cb44d971164b48b5f0d8d5abdb38b381e7a8b880ba12aa406", + "blk.1.attn_norm.weight": "88b6b91f93a1ef83425a7c7dc2a2fbd3b22704a04c64a80061df376ac8c33626", + "blk.1.attn_output.weight": "f031a537490c452be3b3bb51e6b7949a636405756e160976a1c070a792ea00ee", + "blk.1.attn_q.weight": "bdb23214b1cf9cfd30f863a0a5868e52c6809d93b7e8f44df096a94204d9896a", + "blk.1.attn_v.weight": "e9bbc0b05f2c872fb1403f8f938cd1612b502229ee401f12593b1164c61acc00", + "blk.1.ffn_down.weight": "5ff53811038b661a7b8f2bfdf213bebfb185ec1a6060b662f063714f33584d79", + "blk.1.ffn_gate.weight": "205085c8c951a5c7543b1495183cd96028fb49f67464b3e9862a2693a6077a33", + "blk.1.ffn_norm.weight": "798f354fc85afce9625f5d10093a585a966831698a0560e6c9b97ce659eb4b22", + "blk.1.ffn_up.weight": "db92dc5684cb6e90940e13f4d1da555ed20ba4f8cab1e990ddfd7553e2e91315", + "blk.2.attn_k.weight": "ef5ce360c4eed6d00d03ca4761e0f8e4b0af4509978468314be14f3d46621044", + "blk.2.attn_norm.weight": "6dadbc05dbd0d3fabb4216affa60a3de1378a82d2859dc90b338cbe70f50d455", + "blk.2.attn_output.weight": "6bbf87a966f691bbfd7c8d25629aa4e6710107bd431a667434861febb391edc5", + "blk.2.attn_q.weight": "4e575c09ae2de417ce9057ce8b073680e860a24aae13a472b68f101b760752e5", + "blk.2.attn_v.weight": "cd33f7f01141e9439afdaf2ea1aaced9feaa335e32a58daa136ebd555d4d96f4", + "blk.2.ffn_down.weight": "b970ff1b0b6494165defe2fbfa1d31425766ed71e64de9ec4e66ac3955c8bc5f", + "blk.2.ffn_gate.weight": "dbb3e1360402e0e369b101995bb686b73f95d4a7673f061be85d64d15dfb0061", + "blk.2.ffn_norm.weight": "bfb7980105d8ac9647710454f57a5cdac50598a0f6f4884e16f1d94b00844687", + "blk.2.ffn_up.weight": "50ef89339b275a438b664686f6227dd9b6e43853ed6856ec9e33ef4bbd90bda1", + "blk.3.attn_k.weight": "be942ea98151434eebcd2c1da4b00e0146152fe524a530689b1fd491cb833d21", + "blk.3.attn_norm.weight": "0df2f218daf609c289fb7c60c5f375fa99c0d4e04381ad5a494a19144edd8e20", + "blk.3.attn_output.weight": "c2184aaf86aa2cb8f47be49f60b165834e97205f39c6ee1dfd19fd4411a156ce", + "blk.3.attn_q.weight": "4f86e2a0a4221c1c84ff9c409ac89893cb95d7208cf65bf1e98e24e01125f991", + "blk.3.attn_v.weight": "abfdb8a60c349dadde641d1afc9542025e24fbf41a3238bfa9675e0b1f1e4b68", + "blk.3.ffn_down.weight": "58821a8d87008d47d122427911c6fad5272aca70c448bbae223256a74bacd07e", + "blk.3.ffn_gate.weight": "776e051f1a0ddd5c4934e69186683a75ca9a3c8c0f61911bba321fed1dd287d2", + "blk.3.ffn_norm.weight": "7f380f29335e28be90bfcfae6f6d69fdf5751211b36d2dd62aa5541ed113e4f2", + "blk.3.ffn_up.weight": "fc5ae8d488894cbd4951059675468d227da27871d26e925c9941863841c097ee", + "blk.4.attn_k.weight": "14833b078cc4c5137bdd5fdc0538047974ca147a99b0282e1b144440c78bc1db", + "blk.4.attn_norm.weight": "0a69957d4a15599fb80ad4753558020804925221457d9a5052926754d3768065", + "blk.4.attn_output.weight": "887a49b6130fb6297cf10767207c3dd97191b2cf63723449af9c27bca8dbeda0", + "blk.4.attn_q.weight": 
"51fd577b76764824dd6f0d4891c137ebe4736f591b5ca2793c5fff2be49abbde", + "blk.4.attn_v.weight": "1a623c43cf9c509d1b7ea0d1a5c04d0af4809665f9f9e93b7d6dba8c5df178fa", + "blk.4.ffn_down.weight": "5d61e8856d8941d2b1fd138116d015f63840d0fa1e31e20e20a5ceca1536ceec", + "blk.4.ffn_gate.weight": "06640f7273764f8ca5df7e386547417916b6cd7d565a8343153113239a94b0a1", + "blk.4.ffn_norm.weight": "91a6c6c41b894228e361435ecbc5058dca34d4911a23da5b56de219299c964d3", + "blk.4.ffn_up.weight": "d016dac1055e36d6a10b6317e57f98a904709ea892ef3194342f4d2f6326561e", + "blk.5.attn_k.weight": "987146afe124131500808cc0da33c06d207433656d41df6e6d8c99118a83bac5", + "blk.5.attn_norm.weight": "6b354938966f2608a2fb8d0f5b363ed0d8b0967c2ec8d0abd5c625b413042ded", + "blk.5.attn_output.weight": "cdcbfe02c6ff79d5326882b017a02099f5af71beedf6b1b3eb4de01e3a844536", + "blk.5.attn_q.weight": "b910d0cff781d3efb42eab0a302f46f286b2de717079175680d5b42bf8c309c8", + "blk.5.attn_v.weight": "66d3a279f747412f9f4b0e8abad44540c122ab2e811a7ee74c1f33bc36caade9", + "blk.5.ffn_down.weight": "c9b0efd2212981f16d956d8571f054b68780ad01f4917033647e359b557a4653", + "blk.5.ffn_gate.weight": "fe96b94109ca141c01f6a04788e20783019ca6ec334aa1f3134810bdb499e557", + "blk.5.ffn_norm.weight": "aa7b016e832e7055a36c6e20de58ea1936f995f390401fff1c5fc65906064e49", + "blk.5.ffn_up.weight": "555ce27c4873d3375394f38ad3b45e3d8848f9d5642dc1602383d0f0a33c2a14", + "blk.6.attn_k.weight": "88280d461db324c4f36475ce396793063e61a27283ec64511b0480890fb5b3b4", + "blk.6.attn_norm.weight": "af8f460c411f660d33196286d208f1845fd5a2b45f7b56549a4df31e7515447a", + "blk.6.attn_output.weight": "dd9996fb0a256e8375ad3917705258a33fce006bcea0f536caae420a77974d8b", + "blk.6.attn_q.weight": "7a4841541191e037cfb9b07930c4d8cab451809658b182f0ada6ccde9615c003", + "blk.6.attn_v.weight": "ae81e6a592b64d701a9d40233e986039a56cba8d8d24f61aea93c6393cf3078a", + "blk.6.ffn_down.weight": "622dd1ce1706355cbc659a8ab2c4509678ffe0f3ad34258e5e25ed2a5d951bcd", + "blk.6.ffn_gate.weight": "8389a735c0bd5591010f8ced9805a2a12c749f6df0d3c18ad4d05c2a302e7168", + "blk.6.ffn_norm.weight": "621f5346400382474d61358397bd58fb1459b07c53e376e4bca15e08b3f9b3fb", + "blk.6.ffn_up.weight": "8d834e4c42f13c251dfee36cf89e12f1bd400680d00d5c2e6cac0459e9ce2f7f", + "blk.7.attn_k.weight": "8bd0412de65a3e64901ef8fe6a28c95e116bf39dc9aa22f0126b9d36688e5ea7", + "blk.7.attn_norm.weight": "056d8e56be4e87d6dc6f900762f0dc6fde07bfdc50dd85bfc510415e2bba3f3d", + "blk.7.attn_output.weight": "27972eda51da53d416ff95aed78149a2c5a287b47d2cd46f2f544ca692ecb3bb", + "blk.7.attn_q.weight": "41eca977b9371f7932800c11a9c45b931310196919e2a0651b847703b180fc7f", + "blk.7.attn_v.weight": "13c74fd7e07f08883a09fb070a1fe5bbdd2341b4cb8d1cac07c4b637049b5774", + "blk.7.ffn_down.weight": "9e75db42468800849a9a7da603d0072c5e86c8ed2b4d8b20a312a51fb86a7a10", + "blk.7.ffn_gate.weight": "db6bdc3117f910088aaf7db51f2da63ea5bd933de36af5599c215bfb26f7db2b", + "blk.7.ffn_norm.weight": "48bb82b49bfc8679a1e77f282ee182d952db7a3c11be7ef9a102ee2ddd8011e2", + "blk.7.ffn_up.weight": "feebea87175817a0f3585ec0af09dc873d94c203581ae97a712eb356d3b49efe", + "blk.8.attn_k.weight": "d5640ad71b6af68d88e17bf8e7fc26c907d2262605457a84247dd9afc2884d69", + "blk.8.attn_norm.weight": "75b850c481a69083ae09d0207ba7317b37c735a39fcf5fef5400e6c84fb1257f", + "blk.8.attn_output.weight": "cbd669dbdea2bdd90f9f0cc97566b3dffff3c56cecb4f47290ceef30da83b2d6", + "blk.8.attn_q.weight": "9edcb63087a431bac361822497e6ecdaa06d9ea4a1a754e36da7ba9f8db81c7c", + "blk.8.attn_v.weight": 
"3fb72c2c4f95a83626aa3e30062f9450b09ab37c7871e229f18bbc5cf744633c", + "blk.8.ffn_down.weight": "bd69d2c9172974fff154441b237b4787fb53b2d185325442d5048130ef5bc4ef", + "blk.8.ffn_gate.weight": "d04689c80553edd011d1cbaa5d570fffa7fa91e88b66cf1352d89ab60b72f908", + "blk.8.ffn_norm.weight": "e49984183b735b7f2c4e4730c289eed9394056d2e283a00fd83ea0915df31a73", + "blk.8.ffn_up.weight": "8fe62a1ce8e847e567add6c6f6bf2922bc467495b5eb4c116b3cb85b85b3b211", + "blk.9.attn_k.weight": "d90904959e5004cf0d6e729c6bff18cc33c094798b802473c1ec55ab8d276183", + "blk.9.attn_norm.weight": "79277f290cc07411115d8fa138045edf4a17b3416ab2145409cbe8ab829fd4ee", + "blk.9.attn_output.weight": "5a21bf2e1f09a81405025f96d4153ffb630158e17269cff8ffff935c38ceb1a7", + "blk.9.attn_q.weight": "51b1d0febc3b350945be4504f55afa4347517bde0f710e1a4b88e6b17e71e7c7", + "blk.9.attn_v.weight": "aab7e1db0a8b50a03036356791ffce736ab010d15674c96eaef8049d80076054", + "blk.9.ffn_down.weight": "cbf43ec84becb40c9359a181ab0e641fd7faae7d34b549501f7cfb7afdc3d764", + "blk.9.ffn_gate.weight": "dce0e8661c778327bed7f03b6790d26710764188aed9dc746e6e05863891fa57", + "blk.9.ffn_norm.weight": "6d41642104f995c77bf31122b13237caebda3e7fcccb1367ce91db36b015e923", + "blk.9.ffn_up.weight": "82fe4c67bf24e7b2d6f6e05f7b1234c2bf90c3932951091a9066211b8e15ecbb", + "blk.10.attn_k.weight": "f6a9ed8fd8d3229b5d03175c413ffc56a07f2ce7236271986361dd3d8993f9aa", + "blk.10.attn_norm.weight": "cebbef89f0326ca8e02df3867a571e4d61c20c2a12f295f98ae590d62bc86010", + "blk.10.attn_output.weight": "34f5efb86accb4f06347d83a32558ea8eab3039d128969161a741ebacbb656ff", + "blk.10.attn_q.weight": "1e0efe27df2d5d50f7157253ba2cfd436d6781c3dc78ca176d0c16a210b5b763", + "blk.10.attn_v.weight": "8f085bf50a2b0f83cd6cdda3c8ef5a9e204a36348ed95871aac725d1f68640cf", + "blk.10.ffn_down.weight": "bf3b3cb4cace435809ac7b4cc933f20853af12f1f272d3dcefe7f19c0f203b8b", + "blk.10.ffn_gate.weight": "d3df7a1413b1c5adf1a1dcda9e5225a15c89874bae53bb6137ad1ea42fca2d34", + "blk.10.ffn_norm.weight": "a1da603b0480471b5ed8e862148cecd5fed918f8304d6933ab0bdb25b8d2fb8f", + "blk.10.ffn_up.weight": "bffbba605922e972dc47dda88a0b4659aa52236c76e5fe861a949e6d9a367492", + "blk.11.attn_k.weight": "9f31c63d66cd32c29b1eb8bb829d0c8525ce2ae936e0eefdaab6335a2d12a3df", + "blk.11.attn_norm.weight": "0bde1a266d8b2e8f202bb7e2e88b19147ca83021901f6d3cae77a4df5548c754", + "blk.11.attn_output.weight": "e10725c7cf746ed4a7e472cf7aea6cb564e5db6a1d5197adc980d650a387ccea", + "blk.11.attn_q.weight": "05ee758a7d065802630f8c65dca424364c1c8825e389aa33f9405c45e8a50cce", + "blk.11.attn_v.weight": "0c3ae7090f11775d24c51120db6e305db6aff706493e7ee123dcab74485ba789", + "blk.11.ffn_down.weight": "7ba40b8e12c09c5fb2006b77a771cb01ce894e88a3b3e1877f927a5b89c91709", + "blk.11.ffn_gate.weight": "db76388a023b98097972d354ba1c6a5e26efdeb1c596b9c28bf2cd8f6596975e", + "blk.11.ffn_norm.weight": "a38c3ae1b89a68ddc7b72c99c5b28be7fe3787c4fad9904d0c43d64eaf00c474", + "blk.11.ffn_up.weight": "13c8142f9cf1eddc658babf978daf3515c4ccc45f849f3e7e3930aa18a8480a0", + "blk.12.attn_k.weight": "f03241c36ac87cb57429a2ef22186b8d7d0b590a8b173beb01fa13d93772f3b1", + "blk.12.attn_norm.weight": "4568f654e6d65104d586e7c16ba960c83428698ce103022b7e0be15e2884e13b", + "blk.12.attn_output.weight": "04867603f82f91e41306e09b33ecda0104b3ee4834061f2c0bbdc8da33c72509", + "blk.12.attn_q.weight": "70fe04b9a8e08b6100cc8d6b58bf4cbbad15ca1de82d63baca5d352ba6c4cbae", + "blk.12.attn_v.weight": "15cb28db61a86c98687991d7e611bc92a1fcc6007f3432149cfb5fe518a4f65e", + "blk.12.ffn_down.weight": 
"6d10c790a4e3dc44c2dc36d96251ae97cdf30a4fa04d4c43e31bfbd038e6a7b7", + "blk.12.ffn_gate.weight": "3462a2d8f6b4743b25e24da51b90018ac2858d05ac7e582bcb69063cfdac1104", + "blk.12.ffn_norm.weight": "1f96392c1faa34e34ae5dea55a6a86c5aa4c79758952075d53d28de89dd88456", + "blk.12.ffn_up.weight": "d22eacc612a7411953d948483c5fb201e11722955ee0754da866e7bec578ac6d", + "blk.13.attn_k.weight": "5864977e6b733ea942647d6feed5c76156c48c200649c22e4e11b9e5860e57f3", + "blk.13.attn_norm.weight": "87e053535144723db4145aa5402acc54331b7696752d852bb9fc542ff33f0fb5", + "blk.13.attn_output.weight": "078145f5ad83f8b14f97a869346f7fd1583b24d1e3edadaa95d3da4242973f8f", + "blk.13.attn_q.weight": "3b8caf35504cbc4d1a7dd6e011a95760703b7f71e2218b030b1254f811362dd7", + "blk.13.attn_v.weight": "4fdf8365a603e043e5b40c4a21c84ac167f9be62794178f9d8a608dfe5653bf9", + "blk.13.ffn_down.weight": "a07d3abbfcacf48ba028df2cab895be32cc15022d23389a745286e79c1b1d1fd", + "blk.13.ffn_gate.weight": "1d2ab39666aa2909acc96787432a3ed13b19d25170f74665fadff9b17bbaffb1", + "blk.13.ffn_norm.weight": "4f2e809fda5f3eadf52578ee50e0ba36e53be91e55dce418c12dfe595f5f18e7", + "blk.13.ffn_up.weight": "8783d2720c2c37ca176a5801e0b3ef1f9cc9cf3ef1cd37af423aaf6b2a27e2bd", + "blk.14.attn_k.weight": "ce9428e2b55d43ae0c6690dbd56182f99adc427694ba8236b405cc8ea5035e86", + "blk.14.attn_norm.weight": "6abb35f9db8251d6ae954bda147c6ada2371b0574d11702e828f3c6ac99b7cc0", + "blk.14.attn_output.weight": "fe3880916d0ceb5bff672c88bbefb7060a545be609bf049beb2024b38221836d", + "blk.14.attn_q.weight": "7c8ad81be6f4a350931fd108b5f7c9e366e8c26ef62d1d85ffef5dca8fd893f8", + "blk.14.attn_v.weight": "e4bdedffacbebe38567a0734dfd67db90e911d9a9669fcde9a7c4ad8a0066c52", + "blk.14.ffn_down.weight": "ef6694dff1e05820aac0cd2b22f39ac7788b4967afc9250775575554c66aab2c", + "blk.14.ffn_gate.weight": "db63c4179e2db704bc505e2b4696e055b593e295a1b7c4c586fc793bdd5aab19", + "blk.14.ffn_norm.weight": "2796a62d832a9710148f95d533320492a33e712b2e5218659c548705bd11684d", + "blk.14.ffn_up.weight": "3f78c78d8c2d54df45f799d4ff902316628af296834afe4ceed63d4a324ff03e", + "blk.15.attn_k.weight": "6e810ee3859e07695645ee0c9a5efc7962668984a5f0a9325f47e462743b447c", + "blk.15.attn_norm.weight": "0956b576ae96db0b28cb09f761f801cfd9281432284664f0fe181c8d9c55d1ec", + "blk.15.attn_output.weight": "03a17f7e94208177aace5cc41b7f54670ba57873b7274ff6e23caf58cce110ca", + "blk.15.attn_q.weight": "b8edafe7d2216a6f8b4ae4905a906475490e6ea418f6e1d3cec563dbdc6fab91", + "blk.15.attn_v.weight": "f8ae8cae0f4cfa34a459824eba57350c3c248104ba5607e7d9dc7d7c39aaf4a6", + "blk.15.ffn_down.weight": "8d02eb439da852246d2ca67e9b7b6de0b090b80744355e64728a23e41926505b", + "blk.15.ffn_gate.weight": "ed5bf361c67db8731f186b775826f21c33bdb521111fd2d922539719a770239f", + "blk.15.ffn_norm.weight": "5942ca3c73209ac9a0c8bfd9b4aab7f7be7aee9aa12d9c35833493b44af76767", + "blk.15.ffn_up.weight": "f4bebf4ad99ec5f911327dec347be6c595814885309c7bc5647ce28c7f4d1cf5", + "blk.16.attn_k.weight": "756a534c19364448e0958b8948fe33891c6ccda0fbb4dfa2024e1f532a87804b", + "blk.16.attn_norm.weight": "386b7b9e4e6509f6af9c022d942b6c6c6cc136aeed8751ecb037c74d7c4bfb93", + "blk.16.attn_output.weight": "3ba1a766a25830b84d7c22178203635f9c5624caad290bc5e5d73da5d5e7a2ec", + "blk.16.attn_q.weight": "d39b0c91e1fda7685d50a0f7cc8d18c44b5bdc90a142c7fda0bc329cca1afa74", + "blk.16.attn_v.weight": "98b33fcb0ee3483cff1b06ecb44d7b7ffb4d34c268248e4d73dfdf82b2065b2f", + "blk.16.ffn_down.weight": "14006f5e4acb2f9416271ae562e299359cd2585739c7fc77ccbca54495563948", + "blk.16.ffn_gate.weight": 
"12f8abae2d301d8f88bedb6af98b1daecc7b0b8d05148594f931f30958d77aca", + "blk.16.ffn_norm.weight": "129a15a046ee96d06de288bd43c80f77a6b0fb3a159c7367154c6e4aaf362672", + "blk.16.ffn_up.weight": "b4a5911a45f3871ef1d4efb7dc7108645a564b70f818eccf45beebef2e844ee9", + "blk.17.attn_k.weight": "5e1bfcff0146ebdde3817b656952892eb671e14e75afc92fa53f84f8eecbec4c", + "blk.17.attn_norm.weight": "60bc988fab7c4b29ee9de599df41a8de00caa94fcd74677da011fac82f60f465", + "blk.17.attn_output.weight": "ba49b40d6a0b5685f749c24b0edbed3adc44dbe13b5d5e5fa1e56169fc746555", + "blk.17.attn_q.weight": "82bb415d24efcd14d03ace03f907bb70db6a204c76a0bdd1892e0fba165db87d", + "blk.17.attn_v.weight": "73dbe54beb91a899884e275ea81ffc5187a20cb7d5b68d5c299b783096999d94", + "blk.17.ffn_down.weight": "7c086166241e0664f8963fd1ca4ed74c737abfb2525ec20f8435821ff50158f3", + "blk.17.ffn_gate.weight": "51a32f78244d42a539f619c5ce661db9e6cf41636280a826d439b5444edcd28c", + "blk.17.ffn_norm.weight": "c4bb247fccd1ecc84875028af63dd20aaf5cbd17eb94a9bc36679c09285dccab", + "blk.17.ffn_up.weight": "b5886182790bc6fbadd63de9bc4ffee416f3b69a66280d197ab8c18edf769abf", + "output_norm.weight": "481f3097d0a20412e35b3a739b1b958487bcd41ff67744baa3c9acbddd2ee4d4" +} diff --git a/convert/tokenizer.go b/convert/tokenizer.go new file mode 100644 index 0000000..bedcd4f --- /dev/null +++ b/convert/tokenizer.go @@ -0,0 +1,331 @@ +package convert + +import ( + "crypto/sha256" + "encoding/hex" + "encoding/json" + "errors" + "fmt" + "io/fs" + "log/slog" + "os" + "slices" + "strings" + + "golang.org/x/exp/maps" +) + +const ( + _ int32 = iota + tokenTypeNormal + tokenTypeUnknown + tokenTypeControl + tokenTypeUserDefined + tokenTypeUnused + tokenTypeByte +) + +type Tokenizer struct { + *Vocabulary + SpecialVocabulary []*SpecialVocabulary + Merges []string + + Pre string + Template string +} + +func parseTokenizer(fsys fs.FS, specialTokenTypes []string) (*Tokenizer, error) { + v, err := parseVocabulary(fsys) + if err != nil { + return nil, err + } + + t := &Tokenizer{ + Vocabulary: v, + Pre: "default", + } + + addedTokens := make(map[string]token) + if f, err := fsys.Open("tokenizer.json"); errors.Is(err, os.ErrNotExist) { + } else if err != nil { + return nil, err + } else { + defer f.Close() + + var tt tokenizer + if err := json.NewDecoder(f).Decode(&tt); err != nil { + return nil, err + } + + for _, t := range tt.AddedTokens { + addedTokens[t.Content] = t + } + + if len(tt.Model.Merges) == 0 { + // noop; merges is empty + } else if err := json.Unmarshal(tt.Model.Merges, &t.Merges); err == nil { + // noop; merges is []string + } else if merges, err := func() ([][]string, error) { + var merges [][]string + if err := json.Unmarshal(tt.Model.Merges, &merges); err != nil { + return nil, err + } + + return merges, nil + }(); err == nil { + t.Merges = make([]string, len(merges)) + for i := range merges { + t.Merges[i] = strings.Join(merges[i], " ") + } + } else { + return nil, fmt.Errorf("could not parse tokenizer merges. 
expected []string or [][]string: %w", err) + } + + sha256sum := sha256.New() + for _, pt := range tt.PreTokenizer.PreTokenizers { + switch pt.Type { + case "Split": + if pt.Pattern.Regex != "" { + // create a checksum of all Split pretokenizers which should be sufficient + // to identify the pretokenizer + sha256sum.Write([]byte(pt.Pattern.Regex)) + } + } + } + + switch digest := hex.EncodeToString(sha256sum.Sum(nil)); digest { + case "d98f9631be1e9607a9848c26c1f9eac1aa9fc21ac6ba82a2fc0741af9780a48f": + t.Pre = "llama-bpe" + case "03df5c5863ad70781dcfdef491ead25140f895fe8010964be0daefe27be32b02": + t.Pre = "deepseek-llm" + case "21cde974d587f0d54dc8d56b183cc1e6239600172035c68fbd6d4b9f8da0576e": + t.Pre = "deepseek-coder" + case "1ff7f41064896984db5d1bb6ff64fa4bc29007d08c1b439e505b7392777a319e": + t.Pre = "qwen2" + case "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855": + // noop, empty pretokenizer + default: + slog.Warn("unknown pretokenizer, using default", "digest", digest) + } + } + + if f, err := fsys.Open("tokenizer_config.json"); errors.Is(err, os.ErrNotExist) { + // noop + } else if err != nil { + return nil, err + } else { + defer f.Close() + + var p map[string]json.RawMessage + if err := json.NewDecoder(f).Decode(&p); err != nil { + return nil, err + } + + if template, ok := p["chat_template"]; ok { + var s []struct { + Name string `json:"name"` + Template string `json:"template"` + } + if err := json.Unmarshal(template, &t.Template); err == nil { + // noop + } else if err := json.Unmarshal(template, &s); err == nil { + for _, e := range s { + if e.Name == "default" { + t.Template = e.Template + break + } + } + } else { + return nil, fmt.Errorf("invalid chat_template: %w", err) + } + } + + for _, st := range specialTokenTypes { + sv := SpecialVocabulary{Type: st} + if bts, ok := p[fmt.Sprintf("add_%s_token", st)]; ok { + if err := json.Unmarshal(bts, &sv.AddToken); err != nil { + return nil, err + } + } + + if bts, ok := p[fmt.Sprintf("%s_token", st)]; ok { + var content string + if err := json.Unmarshal(bts, &content); err != nil { + var mm map[string]any + if err := json.Unmarshal(bts, &mm); err != nil { + continue + } + + content, ok = mm["content"].(string) + if !ok { + continue + } + } + + sv.Content = content + } + + if id, ok := addedTokens[sv.Content]; ok { + sv.ID = id.ID + t.SpecialVocabulary = append(t.SpecialVocabulary, &sv) + } + } + } + + if f, err := fsys.Open("generation_config.json"); errors.Is(err, os.ErrNotExist) { + } else if err != nil { + return nil, err + } else { + defer f.Close() + + var p map[string]json.RawMessage + if err := json.NewDecoder(f).Decode(&p); err != nil { + return nil, err + } + + for _, st := range specialTokenTypes { + if bts, ok := p[fmt.Sprintf("%s_token_id", st)]; ok { + var ids []int32 + if err := json.Unmarshal(bts, &ids); err != nil { + // value is not a list so the existing ID is used + continue + } + + if i := slices.IndexFunc(t.SpecialVocabulary, func(sv *SpecialVocabulary) bool { + return sv.Type == st + }); i >= 0 { + t.SpecialVocabulary[i].IDs = ids + } + } + } + } + + return t, nil +} + +type tokenizer struct { + AddedTokens []token `json:"added_tokens"` + Model struct { + Type string `json:"type"` + Vocab map[string]int `json:"vocab"` + Merges json.RawMessage `json:"merges"` + } `json:"model"` + + PreTokenizer struct { + PreTokenizers []struct { + Type string `json:"type"` + Pattern struct { + Regex string `json:"Regex"` + } `json:"pattern"` + } `json:"pretokenizers"` + } `json:"pre_tokenizer"` +} + +type 
token struct { + ID int `json:"id"` + Content string `json:"content"` + Special bool `json:"special"` + UserDefined bool +} + +type Vocabulary struct { + Model string + Tokens []string + Scores []float32 + Types []int32 +} + +func parseVocabularyFromTokenizer(fsys fs.FS) (*Vocabulary, error) { + f, err := fsys.Open("tokenizer.json") + if err != nil { + return nil, err + } + defer f.Close() + + var t tokenizer + if err := json.NewDecoder(f).Decode(&t); err != nil { + return nil, err + } + + tokens := make(map[int]token, len(t.Model.Vocab)) + for k, v := range t.Model.Vocab { + tokens[v] = token{ + ID: v, + Content: k, + } + } + + for _, token := range t.AddedTokens { + token.UserDefined = true + tokens[token.ID] = token + } + + keys := maps.Keys(tokens) + slices.Sort(keys) + + v := Vocabulary{Model: "gpt2"} + for _, k := range keys { + token := tokens[k] + v.Tokens = append(v.Tokens, token.Content) + v.Scores = append(v.Scores, float32(token.ID)) + + switch { + case token.Special: + v.Types = append(v.Types, tokenTypeControl) + case token.UserDefined: + v.Types = append(v.Types, tokenTypeUserDefined) + default: + v.Types = append(v.Types, tokenTypeNormal) + } + } + + return &v, nil +} + +func parseVocabulary(fsys fs.FS) (*Vocabulary, error) { + patterns := []struct { + Pattern string + Func func(fs.FS) (*Vocabulary, error) + }{ + {"tokenizer.model", parseSentencePiece}, + {"tokenizer.json", parseVocabularyFromTokenizer}, + } + + for _, pattern := range patterns { + if _, err := fs.Stat(fsys, pattern.Pattern); errors.Is(err, os.ErrNotExist) { + continue + } else if err != nil { + return nil, err + } + + return pattern.Func(fsys) + } + + return nil, errors.New("unknown tokenizer format") +} + +type SpecialVocabulary struct { + Type string + ID int + Content string + AddToken bool + + // IDs is populated by generation_config.json + IDs []int32 +} + +func (sv SpecialVocabulary) Key() string { + switch t := sv.Type; t { + case "bos", "eos", "cls", "mask": + return t + case "unk": + return "unknown" + case "sep": + //nolint:misspell // this is an upstream typo + return "seperator" + case "pad": + return "padding" + } + + panic("unknown special vocabulary type") +} diff --git a/convert/tokenizer_spm.go b/convert/tokenizer_spm.go new file mode 100644 index 0000000..340c3d5 --- /dev/null +++ b/convert/tokenizer_spm.go @@ -0,0 +1,171 @@ +package convert + +import ( + "cmp" + "encoding/json" + "errors" + "fmt" + "io/fs" + "log/slog" + "os" + "reflect" + "slices" + + "google.golang.org/protobuf/proto" + + "github.com/ollama/ollama/convert/sentencepiece" +) + +func parseSentencePiece(fsys fs.FS) (*Vocabulary, error) { + slog.Debug("using spm vocabulary") + + ast, err := parseAdditionalSpecialTokens(fsys) + if err != nil { + return nil, err + } + + bts, err := fs.ReadFile(fsys, "tokenizer.model") + if err != nil { + return nil, err + } + + var spm sentencepiece.ModelProto + if err := proto.Unmarshal(bts, &spm); err != nil { + return nil, err + } + + v := Vocabulary{Model: "llama"} + for _, piece := range spm.GetPieces() { + v.Tokens = append(v.Tokens, piece.GetPiece()) + v.Scores = append(v.Scores, piece.GetScore()) + + switch t := piece.GetType(); t { + case sentencepiece.ModelProto_SentencePiece_UNKNOWN, + sentencepiece.ModelProto_SentencePiece_CONTROL, + sentencepiece.ModelProto_SentencePiece_UNUSED, + sentencepiece.ModelProto_SentencePiece_BYTE: + v.Types = append(v.Types, int32(t)) + default: + tt := int32(sentencepiece.ModelProto_SentencePiece_NORMAL) + + // temporary fix to handle gemma3 broken 
configs + if slices.Contains([]string{"", ""}, piece.GetPiece()) { + tt = int32(sentencepiece.ModelProto_SentencePiece_CONTROL) + } + + for _, t := range ast { + if t.Content == piece.GetPiece() { + tt = int32(sentencepiece.ModelProto_SentencePiece_CONTROL) + break + } + } + + v.Types = append(v.Types, tt) + } + } + + f, err := fsys.Open("added_tokens.json") + if errors.Is(err, os.ErrNotExist) { + return &v, nil + } else if err != nil { + return nil, err + } + defer f.Close() + + var atm map[string]int + if err := json.NewDecoder(f).Decode(&atm); err != nil { + return nil, err + } + + type t struct { + id int + content string + } + + var ts []t + for content, id := range atm { + ts = append(ts, t{id, content}) + } + + slices.SortFunc(ts, func(i, j t) int { + return cmp.Compare(i.id, j.id) + }) + + for _, t := range ts { + if t.id < len(v.Tokens) { + if v.Tokens[t.id] == t.content { + slog.Warn("tokenizer", "duplicate token", t.content, "id", t.id) + continue + } + return nil, fmt.Errorf("token mismatch: %s != %s at pos [%d]", t.content, v.Tokens[t.id], t.id) + } + if t.id != len(v.Tokens) { + return nil, fmt.Errorf("invalid token id: [%d] as pos [%d]", t.id, len(v.Tokens)) + } + + v.Tokens = append(v.Tokens, t.content) + v.Scores = append(v.Scores, -1000.0) + v.Types = append(v.Types, tokenTypeUserDefined) + } + + return &v, nil +} + +type specialToken struct { + Content string `json:"content"` + Lstrip bool `json:"lstrip"` + Normalized bool `json:"normalized"` + Rstrip bool `json:"rstrip"` + SingleWord bool `json:"single_word"` +} + +func parseAdditionalSpecialTokens(fsys fs.FS) ([]specialToken, error) { + f, err := fsys.Open("special_tokens_map.json") + if errors.Is(err, os.ErrNotExist) { + return nil, nil + } else if err != nil { + return nil, err + } + defer f.Close() + + var m struct { + AdditionalSpecialTokens any `json:"additional_special_tokens"` + } + + if err := json.NewDecoder(f).Decode(&m); err != nil { + return nil, err + } + + var ast []specialToken + + switch st := m.AdditionalSpecialTokens.(type) { + case []string: + for _, s := range st { + ast = append(ast, specialToken{Content: s}) + } + case []any: + for _, s := range st { + // marshal and unmarshal the object to get the special token + tMap := s.(map[string]any) + data, err := json.Marshal(tMap) + if err != nil { + return nil, err + } + + var token specialToken + err = json.Unmarshal(data, &token) + if err != nil { + return nil, err + } + + ast = append(ast, token) + } + + default: + slog.Warn("special token", "unknown token", reflect.TypeOf(st)) + } + + slog.Debug("spm tokenizer", "additional tokens", ast) + + return ast, nil +} diff --git a/convert/tokenizer_test.go b/convert/tokenizer_test.go new file mode 100644 index 0000000..813096f --- /dev/null +++ b/convert/tokenizer_test.go @@ -0,0 +1,325 @@ +package convert + +import ( + "io" + "io/fs" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/google/go-cmp/cmp" +) + +func createTokenizerFS(t *testing.T, dir string, files map[string]io.Reader) fs.FS { + t.Helper() + + for k, v := range files { + if err := func() error { + f, err := os.Create(filepath.Join(dir, k)) + if err != nil { + return err + } + defer f.Close() + + if _, err := io.Copy(f, v); err != nil { + return err + } + + return nil + }(); err != nil { + t.Fatalf("unexpected error: %v", err) + } + } + + return os.DirFS(dir) +} + +func TestParseTokenizer(t *testing.T) { + cases := []struct { + name string + fsys fs.FS + specialTokenTypes []string + want *Tokenizer + }{ + { + name: "string chat 
template", + fsys: createTokenizerFS(t, t.TempDir(), map[string]io.Reader{ + "tokenizer.json": strings.NewReader(`{}`), + "tokenizer_config.json": strings.NewReader(`{ + "chat_template": "" + }`), + }), + want: &Tokenizer{ + Vocabulary: &Vocabulary{Model: "gpt2"}, + Pre: "default", + Template: "", + }, + }, + { + name: "list chat template", + fsys: createTokenizerFS(t, t.TempDir(), map[string]io.Reader{ + "tokenizer.json": strings.NewReader(`{}`), + "tokenizer_config.json": strings.NewReader(`{ + "chat_template": [ + { + "name": "default", + "template": "" + }, + { + "name": "tools", + "template": "" + } + ] + }`), + }), + want: &Tokenizer{ + Vocabulary: &Vocabulary{Model: "gpt2"}, + Pre: "default", + Template: "", + }, + }, + { + name: "added tokens", + fsys: createTokenizerFS(t, t.TempDir(), map[string]io.Reader{ + "tokenizer.json": strings.NewReader(`{ + "added_tokens": [ + { + "id": 999, + "content": "", + "special": false + } + ] + }`), + }), + want: &Tokenizer{ + Vocabulary: &Vocabulary{ + Model: "gpt2", + Tokens: []string{""}, + Scores: []float32{999}, + Types: []int32{4}, + }, + Pre: "default", + }, + }, + { + name: "added tokens overlap vocab", + fsys: createTokenizerFS(t, t.TempDir(), map[string]io.Reader{ + "tokenizer.json": strings.NewReader(`{ + "added_tokens": [ + { + "id": 0, + "content": "", + "special": true + } + ], + "model": { + "vocab": { + "": 0 + } + } + }`), + }), + want: &Tokenizer{ + Vocabulary: &Vocabulary{ + Model: "gpt2", + Tokens: []string{""}, + Scores: []float32{0}, + Types: []int32{3}, + }, + Pre: "default", + }, + }, + { + name: "special token types", + fsys: createTokenizerFS(t, t.TempDir(), map[string]io.Reader{ + "tokenizer.json": strings.NewReader(`{ + "added_tokens": [ + { + "id": 0, + "content": "", + "special": true + }, + { + "id": 1, + "content": "", + "special": true + }, + { + "id": 2, + "content": "", + "special": true + }, + { + "id": 3, + "content": "", + "special": true + } + ], + "model": { + "vocab": { + "": 0, + "": 1, + "": 2, + "": 3 + } + } + }`), + "tokenizer_config.json": strings.NewReader(`{ + "add_bos_token": true, + "add_eos_token": false, + "bos_token": "", + "eos_token": "", + "pad_token": "", + "unk_token": "" + }`), + }), + specialTokenTypes: []string{"pad", "eos", "bos", "unk"}, + want: &Tokenizer{ + Vocabulary: &Vocabulary{ + Model: "gpt2", + Tokens: []string{"", "", "", ""}, + Scores: []float32{0, 1, 2, 3}, + Types: []int32{3, 3, 3, 3}, + }, + SpecialVocabulary: []*SpecialVocabulary{ + {Type: "pad", Content: "", ID: 0, AddToken: false}, + {Type: "eos", Content: "", ID: 1, AddToken: false}, + {Type: "bos", Content: "", ID: 2, AddToken: true}, + {Type: "unk", Content: "", ID: 3, AddToken: false}, + }, + Pre: "default", + }, + }, + { + name: "list string merges", + fsys: createTokenizerFS(t, t.TempDir(), map[string]io.Reader{ + "tokenizer.json": strings.NewReader(`{ + "model": { + "merges": [ + "a b", + "c d", + "e f" + ] + } + }`), + }), + want: &Tokenizer{ + Vocabulary: &Vocabulary{ + Model: "gpt2", + }, + Merges: []string{ + "a b", + "c d", + "e f", + }, + Pre: "default", + }, + }, + { + name: "list list string merges", + fsys: createTokenizerFS(t, t.TempDir(), map[string]io.Reader{ + "tokenizer.json": strings.NewReader(`{ + "model": { + "merges": [ + [ + "a", "b" + ], + [ + "c", "d" + ], + [ + "e", "f" + ] + ] + } + }`), + }), + want: &Tokenizer{ + Vocabulary: &Vocabulary{ + Model: "gpt2", + }, + Merges: []string{ + "a b", + "c d", + "e f", + }, + Pre: "default", + }, + }, + { + name: "generation config eos token ids", + 
fsys: createTokenizerFS(t, t.TempDir(), map[string]io.Reader{ + "tokenizer.json": strings.NewReader(`{ + "added_tokens": [ + { + "id": 0, + "content": "", + "special": true + }, + { + "id": 1, + "content": "", + "special": true + }, + { + "id": 2, + "content": "", + "special": true + }, + { + "id": 3, + "content": "", + "special": true + } + ], + "model": { + "vocab": { + "": 0, + "": 1, + "": 2, + "": 3 + } + } + }`), + "tokenizer_config.json": strings.NewReader(`{ + "add_bos_token": true, + "add_eos_token": false, + "bos_token": "", + "eos_token": "" + }`), + "generation_config.json": strings.NewReader(`{ + "bos_token_id": 0, + "eos_token_id": [1, 2, 3] + }`), + }), + specialTokenTypes: []string{"pad", "eos", "bos", "unk"}, + want: &Tokenizer{ + Vocabulary: &Vocabulary{ + Model: "gpt2", + Tokens: []string{"", "", "", ""}, + Scores: []float32{0, 1, 2, 3}, + Types: []int32{3, 3, 3, 3}, + }, + SpecialVocabulary: []*SpecialVocabulary{ + {Type: "eos", Content: "", ID: 1, IDs: []int32{1, 2, 3}, AddToken: false}, + {Type: "bos", Content: "", ID: 0, AddToken: true}, + }, + Pre: "default", + }, + }, + } + + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + tokenizer, err := parseTokenizer(tt.fsys, tt.specialTokenTypes) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + if diff := cmp.Diff(tt.want, tokenizer); diff != "" { + t.Errorf("unexpected tokenizer (-want +got):\n%s", diff) + } + }) + } +} diff --git a/discover/amd_common.go b/discover/amd_common.go new file mode 100644 index 0000000..08834b2 --- /dev/null +++ b/discover/amd_common.go @@ -0,0 +1,83 @@ +//go:build linux || windows + +package discover + +import ( + "errors" + "log/slog" + "os" + "path/filepath" + "runtime" + "strings" +) + +// Determine if the given ROCm lib directory is usable by checking for existence of some glob patterns +func rocmLibUsable(libDir string) bool { + slog.Debug("evaluating potential rocm lib dir " + libDir) + for _, g := range ROCmLibGlobs { + res, _ := filepath.Glob(filepath.Join(libDir, g)) + if len(res) == 0 { + return false + } + } + return true +} + +func GetSupportedGFX(libDir string) ([]string, error) { + var ret []string + files, err := filepath.Glob(filepath.Join(libDir, "rocblas", "library", "TensileLibrary_lazy_gfx*.dat")) + if err != nil { + return nil, err + } + for _, file := range files { + ret = append(ret, strings.TrimSuffix(strings.TrimPrefix(filepath.Base(file), "TensileLibrary_lazy_"), ".dat")) + } + return ret, nil +} + +func commonAMDValidateLibDir() (string, error) { + // Favor our bundled version + + // Installer payload location if we're running the installed binary + rocmTargetDir := filepath.Join(LibOllamaPath, "rocm") + if rocmLibUsable(rocmTargetDir) { + slog.Debug("detected ROCM next to ollama executable " + rocmTargetDir) + return rocmTargetDir, nil + } + + // Prefer explicit HIP env var + hipPath := os.Getenv("HIP_PATH") + if hipPath != "" { + hipLibDir := filepath.Join(hipPath, "bin") + if rocmLibUsable(hipLibDir) { + slog.Debug("detected ROCM via HIP_PATH=" + hipPath) + return hipLibDir, nil + } + } + + // Scan the LD_LIBRARY_PATH or PATH + pathEnv := "LD_LIBRARY_PATH" + if runtime.GOOS == "windows" { + pathEnv = "PATH" + } + + paths := os.Getenv(pathEnv) + for _, path := range filepath.SplitList(paths) { + d, err := filepath.Abs(path) + if err != nil { + continue + } + if rocmLibUsable(d) { + return d, nil + } + } + + // Well known location(s) + for _, path := range RocmStandardLocations { + if rocmLibUsable(path) { + return path, nil + } + } 
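+ // Nothing usable was found next to the ollama binary, via HIP_PATH, on the
+ // library search path, or in the standard install locations, so report
+ // failure and let the caller fall back to CPU.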
+ + return "", errors.New("no suitable rocm found, falling back to CPU") +} diff --git a/discover/amd_hip_windows.go b/discover/amd_hip_windows.go new file mode 100644 index 0000000..bf19ef0 --- /dev/null +++ b/discover/amd_hip_windows.go @@ -0,0 +1,147 @@ +package discover + +import ( + "errors" + "fmt" + "log/slog" + "syscall" + "unsafe" + + "golang.org/x/sys/windows" +) + +const ( + hipSuccess = 0 + hipErrorNoDevice = 100 +) + +type hipDevicePropMinimal struct { + Name [256]byte + unused1 [140]byte + GcnArchName [256]byte // gfx#### + iGPU int // Doesn't seem to actually report correctly + unused2 [128]byte +} + +// Wrap the amdhip64.dll library for GPU discovery +type HipLib struct { + dll windows.Handle + hipGetDeviceCount uintptr + hipGetDeviceProperties uintptr + hipMemGetInfo uintptr + hipSetDevice uintptr + hipDriverGetVersion uintptr +} + +func NewHipLib() (*HipLib, error) { + // At runtime we depend on v6, so discover GPUs with the same library for a consistent set of GPUs + h, err := windows.LoadLibrary("amdhip64_6.dll") + if err != nil { + return nil, fmt.Errorf("unable to load amdhip64_6.dll, please make sure to upgrade to the latest amd driver: %w", err) + } + hl := &HipLib{} + hl.dll = h + hl.hipGetDeviceCount, err = windows.GetProcAddress(hl.dll, "hipGetDeviceCount") + if err != nil { + return nil, err + } + hl.hipGetDeviceProperties, err = windows.GetProcAddress(hl.dll, "hipGetDeviceProperties") + if err != nil { + return nil, err + } + hl.hipMemGetInfo, err = windows.GetProcAddress(hl.dll, "hipMemGetInfo") + if err != nil { + return nil, err + } + hl.hipSetDevice, err = windows.GetProcAddress(hl.dll, "hipSetDevice") + if err != nil { + return nil, err + } + hl.hipDriverGetVersion, err = windows.GetProcAddress(hl.dll, "hipDriverGetVersion") + if err != nil { + return nil, err + } + return hl, nil +} + +// The hip library only evaluates the ROCR_VISIBLE_DEVICES variable at startup +// so we have to unload/reset the library after we do our initial discovery +// to make sure our updates to that variable are processed by llama.cpp +func (hl *HipLib) Release() { + err := windows.FreeLibrary(hl.dll) + if err != nil { + slog.Warn("failed to unload amdhip64.dll", "error", err) + } + hl.dll = 0 +} + +func (hl *HipLib) AMDDriverVersion() (driverMajor, driverMinor int, err error) { + if hl.dll == 0 { + return 0, 0, errors.New("dll has been unloaded") + } + var version int + status, _, err := syscall.SyscallN(hl.hipDriverGetVersion, uintptr(unsafe.Pointer(&version))) + if status != hipSuccess { + return 0, 0, fmt.Errorf("failed call to hipDriverGetVersion: %d %s", status, err) + } + + slog.Debug("hipDriverGetVersion", "version", version) + driverMajor = version / 10000000 + driverMinor = (version - (driverMajor * 10000000)) / 100000 + + return driverMajor, driverMinor, nil +} + +func (hl *HipLib) HipGetDeviceCount() int { + if hl.dll == 0 { + slog.Error("dll has been unloaded") + return 0 + } + var count int + status, _, err := syscall.SyscallN(hl.hipGetDeviceCount, uintptr(unsafe.Pointer(&count))) + if status == hipErrorNoDevice { + slog.Info("AMD ROCm reports no devices found") + return 0 + } + if status != hipSuccess { + slog.Warn("failed call to hipGetDeviceCount", "status", status, "error", err) + } + return count +} + +func (hl *HipLib) HipSetDevice(device int) error { + if hl.dll == 0 { + return errors.New("dll has been unloaded") + } + status, _, err := syscall.SyscallN(hl.hipSetDevice, uintptr(device)) + if status != hipSuccess { + return fmt.Errorf("failed call to 
hipSetDevice: %d %s", status, err) + } + return nil +} + +func (hl *HipLib) HipGetDeviceProperties(device int) (*hipDevicePropMinimal, error) { + if hl.dll == 0 { + return nil, errors.New("dll has been unloaded") + } + var props hipDevicePropMinimal + status, _, err := syscall.SyscallN(hl.hipGetDeviceProperties, uintptr(unsafe.Pointer(&props)), uintptr(device)) + if status != hipSuccess { + return nil, fmt.Errorf("failed call to hipGetDeviceProperties: %d %s", status, err) + } + return &props, nil +} + +// free, total, err +func (hl *HipLib) HipMemGetInfo() (uint64, uint64, error) { + if hl.dll == 0 { + return 0, 0, errors.New("dll has been unloaded") + } + var totalMemory uint64 + var freeMemory uint64 + status, _, err := syscall.SyscallN(hl.hipMemGetInfo, uintptr(unsafe.Pointer(&freeMemory)), uintptr(unsafe.Pointer(&totalMemory))) + if status != hipSuccess { + return 0, 0, fmt.Errorf("failed call to hipMemGetInfo: %d %s", status, err) + } + return freeMemory, totalMemory, nil +} diff --git a/discover/amd_linux.go b/discover/amd_linux.go new file mode 100644 index 0000000..830fa1d --- /dev/null +++ b/discover/amd_linux.go @@ -0,0 +1,538 @@ +package discover + +import ( + "bufio" + "errors" + "fmt" + "io" + "io/fs" + "log/slog" + "os" + "path/filepath" + "regexp" + "slices" + "sort" + "strconv" + "strings" + + "github.com/ollama/ollama/envconfig" + "github.com/ollama/ollama/format" +) + +// Discovery logic for AMD/ROCm GPUs + +const ( + DriverVersionFile = "/sys/module/amdgpu/version" + AMDNodesSysfsDir = "/sys/class/kfd/kfd/topology/nodes/" + GPUPropertiesFileGlob = AMDNodesSysfsDir + "*/properties" + + // Prefix with the node dir + GPUTotalMemoryFileGlob = "mem_banks/*/properties" // size_in_bytes line + + // Direct Rendering Manager sysfs location + DRMDeviceDirGlob = "/sys/class/drm/card*/device" + DRMTotalMemoryFile = "mem_info_vram_total" + DRMUsedMemoryFile = "mem_info_vram_used" + + // In hex; properties file is in decimal + DRMUniqueIDFile = "unique_id" + DRMVendorFile = "vendor" + DRMDeviceFile = "device" +) + +var ( + // Used to validate if the given ROCm lib is usable + ROCmLibGlobs = []string{"libhipblas.so.2*", "rocblas"} // TODO - probably include more coverage of files here... 
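+ // Last-resort host install locations probed by commonAMDValidateLibDir when
+ // no bundled, HIP_PATH, or library-search-path copy of ROCm is usable.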
+ RocmStandardLocations = []string{"/opt/rocm/lib", "/usr/lib64"} +) + +// Gather GPU information from the amdgpu driver if any supported GPUs are detected +// Only called once during bootstrap +func AMDGetGPUInfo() ([]RocmGPUInfo, error) { + resp := []RocmGPUInfo{} + if !AMDDetected() { + return resp, fmt.Errorf("AMD GPUs not detected") + } + + // Opportunistic logging of driver version to aid in troubleshooting + driverMajor, driverMinor, err := AMDDriverVersion() + if err != nil { + // TODO - if we see users crash and burn with the upstreamed kernel this can be adjusted to hard-fail rocm support and fallback to CPU + slog.Warn("ollama recommends running the https://www.amd.com/en/support/linux-drivers", "error", err) + } + + // Determine if the user has already pre-selected which GPUs to look at, then ignore the others + var visibleDevices []string + hipVD := envconfig.HipVisibleDevices() // zero based index only + rocrVD := envconfig.RocrVisibleDevices() // zero based index or UUID + gpuDO := envconfig.GpuDeviceOrdinal() // zero based index + switch { + case rocrVD != "": + visibleDevices = strings.Split(rocrVD, ",") + case hipVD != "": + visibleDevices = strings.Split(hipVD, ",") + case gpuDO != "": + visibleDevices = strings.Split(gpuDO, ",") + } + + gfxOverride := envconfig.HsaOverrideGfxVersion() + var supported []string + var libDir string + + // The amdgpu driver always exposes the host CPU(s) first, but we have to skip them and subtract + // from the other IDs to get alignment with the HIP libraries expectations (zero is the first GPU, not the CPU) + matches, _ := filepath.Glob(GPUPropertiesFileGlob) + sort.Slice(matches, func(i, j int) bool { + // /sys/class/kfd/kfd/topology/nodes//properties + a, err := strconv.ParseInt(filepath.Base(filepath.Dir(matches[i])), 10, 64) + if err != nil { + slog.Debug("parse err", "error", err, "match", matches[i]) + return false + } + b, err := strconv.ParseInt(filepath.Base(filepath.Dir(matches[j])), 10, 64) + if err != nil { + slog.Debug("parse err", "error", err, "match", matches[i]) + return false + } + return a < b + }) + gpuCount := 0 + for _, match := range matches { + slog.Debug("evaluating amdgpu node " + match) + fp, err := os.Open(match) + if err != nil { + slog.Debug("failed to open sysfs node", "file", match, "error", err) + continue + } + defer fp.Close() + + scanner := bufio.NewScanner(fp) + isCPU := false + var major, minor, patch uint64 + var vendor, device, uniqueID uint64 + for scanner.Scan() { + line := strings.TrimSpace(scanner.Text()) + // Note: we could also use "cpu_cores_count X" where X is greater than zero to detect CPUs + if strings.HasPrefix(line, "gfx_target_version") { + ver := strings.Fields(line) + + // Detect CPUs + if len(ver) == 2 && ver[1] == "0" { + slog.Debug("detected CPU " + match) + isCPU = true + break + } + + if len(ver) != 2 || len(ver[1]) < 5 { + slog.Warn("malformed "+match, "gfx_target_version", line) + // If this winds up being a CPU, our offsets may be wrong + continue + } + l := len(ver[1]) + var err1, err2, err3 error + patch, err1 = strconv.ParseUint(ver[1][l-2:l], 10, 32) + minor, err2 = strconv.ParseUint(ver[1][l-4:l-2], 10, 32) + major, err3 = strconv.ParseUint(ver[1][:l-4], 10, 32) + if err1 != nil || err2 != nil || err3 != nil { + slog.Debug("malformed int " + line) + continue + } + } else if strings.HasPrefix(line, "vendor_id") { + ver := strings.Fields(line) + if len(ver) != 2 { + slog.Debug("malformed", "vendor_id", line) + continue + } + vendor, err = strconv.ParseUint(ver[1], 10, 64) 
+ if err != nil { + slog.Debug("malformed", "vendor_id", line, "error", err) + } + } else if strings.HasPrefix(line, "device_id") { + ver := strings.Fields(line) + if len(ver) != 2 { + slog.Debug("malformed", "device_id", line) + continue + } + device, err = strconv.ParseUint(ver[1], 10, 64) + if err != nil { + slog.Debug("malformed", "device_id", line, "error", err) + } + } else if strings.HasPrefix(line, "unique_id") { + ver := strings.Fields(line) + if len(ver) != 2 { + slog.Debug("malformed", "unique_id", line) + continue + } + uniqueID, err = strconv.ParseUint(ver[1], 10, 64) + if err != nil { + slog.Debug("malformed", "unique_id", line, "error", err) + } + } + // TODO - any other properties we want to extract and record? + // vendor_id + device_id -> pci lookup for "Name" + // Other metrics that may help us understand relative performance between multiple GPUs + } + + // Note: while ./mem_banks/*/used_memory exists, it doesn't appear to take other VRAM consumers + // into consideration, so we instead map the device over to the DRM driver sysfs nodes which + // do reliably report VRAM usage. + + if isCPU { + continue + } + + // Skip over any GPUs that are masked + if major == 0 && minor == 0 && patch == 0 { + slog.Debug("skipping gpu with gfx000") + continue + } + + // Keep track of numeric IDs based on valid GPUs + gpuID := gpuCount + gpuCount += 1 + + // Look up the memory for the current node + totalMemory := uint64(0) + usedMemory := uint64(0) + var usedFile string + mapping := []struct { + id uint64 + filename string + }{ + {vendor, DRMVendorFile}, + {device, DRMDeviceFile}, + {uniqueID, DRMUniqueIDFile}, // Not all devices will report this + } + slog.Debug("mapping amdgpu to drm sysfs nodes", "amdgpu", match, "vendor", vendor, "device", device, "unique_id", uniqueID) + // Map over to DRM location to find the total/free memory + drmMatches, _ := filepath.Glob(DRMDeviceDirGlob) + for _, devDir := range drmMatches { + matched := true + for _, m := range mapping { + if m.id == 0 { + // Null ID means it didn't populate, so we can't use it to match + continue + } + filename := filepath.Join(devDir, m.filename) + buf, err := os.ReadFile(filename) + if err != nil { + slog.Debug("failed to read sysfs node", "file", filename, "error", err) + matched = false + break + } + // values here are in hex, strip off the lead 0x and parse so we can compare the numeric (decimal) values in amdgpu + cmp, err := strconv.ParseUint(strings.TrimPrefix(strings.TrimSpace(string(buf)), "0x"), 16, 64) + if err != nil { + slog.Debug("failed to parse sysfs node", "file", filename, "error", err) + matched = false + break + } + if cmp != m.id { + matched = false + break + } + } + if !matched { + continue + } + + // Found the matching DRM directory + slog.Debug("matched", "amdgpu", match, "drm", devDir) + totalFile := filepath.Join(devDir, DRMTotalMemoryFile) + buf, err := os.ReadFile(totalFile) + if err != nil { + slog.Debug("failed to read sysfs node", "file", totalFile, "error", err) + break + } + totalMemory, err = strconv.ParseUint(strings.TrimSpace(string(buf)), 10, 64) + if err != nil { + slog.Debug("failed to parse sysfs node", "file", totalFile, "error", err) + break + } + + usedFile = filepath.Join(devDir, DRMUsedMemoryFile) + usedMemory, err = getFreeMemory(usedFile) + if err != nil { + slog.Debug("failed to update used memory", "error", err) + } + break + } + + var name string + // TODO - PCI ID lookup + if vendor > 0 && device > 0 { + name = fmt.Sprintf("%04x:%04x", vendor, device) + } + + // Favor 
UUIDs if available to reduce possibility of getting the numeric IDs wrong + var ID string + if uniqueID != 0 { + ID = fmt.Sprintf("GPU-%016x", uniqueID) + } else { + ID = strconv.Itoa(gpuID) + } + + gpuInfo := RocmGPUInfo{ + GpuInfo: GpuInfo{ + Library: "rocm", + memInfo: memInfo{ + TotalMemory: totalMemory, + FreeMemory: (totalMemory - usedMemory), + }, + ID: ID, + Name: name, + Compute: fmt.Sprintf("gfx%d%x%x", major, minor, patch), + MinimumMemory: rocmMinimumMemory, + DriverMajor: driverMajor, + DriverMinor: driverMinor, + }, + usedFilepath: usedFile, + index: gpuID, + } + + // iGPU detection, remove this check once we can support an iGPU variant of the rocm library + if totalMemory < IGPUMemLimit { + reason := "unsupported Radeon iGPU detected skipping" + slog.Info(reason, "id", gpuID, "total", format.HumanBytes2(totalMemory)) + unsupportedGPUs = append(unsupportedGPUs, UnsupportedGPUInfo{ + GpuInfo: gpuInfo.GpuInfo, + Reason: reason, + }) + continue + } + minVer, err := strconv.Atoi(RocmComputeMajorMin) + if err != nil { + slog.Error("invalid RocmComputeMajorMin setting", "value", RocmComputeMajorMin, "error", err) + } + if int(major) < minVer { + reason := fmt.Sprintf("amdgpu too old gfx%d%x%x", major, minor, patch) + slog.Warn(reason, "gpu", gpuID) + unsupportedGPUs = append(unsupportedGPUs, UnsupportedGPUInfo{ + GpuInfo: gpuInfo.GpuInfo, + Reason: reason, + }) + + continue + } + + slog.Debug("amdgpu memory", "gpu", gpuID, "total", format.HumanBytes2(totalMemory)) + slog.Debug("amdgpu memory", "gpu", gpuID, "available", format.HumanBytes2(totalMemory-usedMemory)) + + // If the user wants to filter to a subset of devices, filter out if we aren't a match + if len(visibleDevices) > 0 { + include := false + for _, visible := range visibleDevices { + if visible == gpuInfo.ID || visible == strconv.Itoa(gpuInfo.index) { + include = true + break + } + } + if !include { + reason := "filtering out device per user request" + slog.Info(reason, "id", gpuInfo.ID, "visible_devices", visibleDevices) + unsupportedGPUs = append(unsupportedGPUs, UnsupportedGPUInfo{ + GpuInfo: gpuInfo.GpuInfo, + Reason: reason, + }) + + continue + } + } + + // Final validation is gfx compatibility - load the library if we haven't already loaded it + // even if the user overrides, we still need to validate the library + if libDir == "" { + libDir, err = AMDValidateLibDir() + if err != nil { + err = fmt.Errorf("unable to verify rocm library: %w", err) + slog.Warn(err.Error()) + unsupportedGPUs = append(unsupportedGPUs, UnsupportedGPUInfo{ + GpuInfo: gpuInfo.GpuInfo, + Reason: err.Error(), + }) + return nil, err + } + } + gpuInfo.DependencyPath = []string{libDir} + + if gfxOverride == "" { + // Only load supported list once + if len(supported) == 0 { + supported, err = GetSupportedGFX(libDir) + if err != nil { + err = fmt.Errorf("failed to lookup supported GFX types: %w", err) + slog.Warn(err.Error()) + unsupportedGPUs = append(unsupportedGPUs, UnsupportedGPUInfo{ + GpuInfo: gpuInfo.GpuInfo, + Reason: err.Error(), + }) + return nil, err + } + slog.Debug("rocm supported GPUs", "types", supported) + } + gfx := gpuInfo.Compute + if !slices.Contains[[]string, string](supported, gfx) { + reason := fmt.Sprintf("amdgpu is not supported (supported types:%s)", supported) + slog.Warn(reason, "gpu_type", gfx, "gpu", gpuInfo.ID, "library", libDir) + unsupportedGPUs = append(unsupportedGPUs, UnsupportedGPUInfo{ + GpuInfo: gpuInfo.GpuInfo, + Reason: reason, + }) + + // TODO - consider discrete markdown just for ROCM troubleshooting? 
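+ // An unsupported gfx type can sometimes be worked around by forcing a
+ // close-enough supported version via HSA_OVERRIDE_GFX_VERSION, which is why
+ // the warning below points at the override documentation.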
+ slog.Warn("See https://github.com/ollama/ollama/blob/main/docs/gpu.md#overrides for HSA_OVERRIDE_GFX_VERSION usage") + continue + } else { + slog.Info("amdgpu is supported", "gpu", gpuInfo.ID, "gpu_type", gfx) + } + } else { + slog.Info("skipping rocm gfx compatibility check", "HSA_OVERRIDE_GFX_VERSION", gfxOverride) + } + + // Check for env var workarounds + if name == "1002:687f" { // Vega RX 56 + gpuInfo.EnvWorkarounds = append(gpuInfo.EnvWorkarounds, [2]string{"HSA_ENABLE_SDMA", "0"}) + } + + // The GPU has passed all the verification steps and is supported + resp = append(resp, gpuInfo) + } + if len(resp) == 0 { + err := fmt.Errorf("no compatible amdgpu devices detected") + slog.Info(err.Error()) + return nil, err + } + if err := verifyKFDDriverAccess(); err != nil { + err = fmt.Errorf("amdgpu devices detected but permission problems block access: %w", err) + slog.Error(err.Error()) + return nil, err + } + return resp, nil +} + +// Quick check for AMD driver so we can skip amdgpu discovery if not present +func AMDDetected() bool { + // Some driver versions (older?) don't have a version file, so just lookup the parent dir + sysfsDir := filepath.Dir(DriverVersionFile) + _, err := os.Stat(sysfsDir) + if errors.Is(err, os.ErrNotExist) { + slog.Debug("amdgpu driver not detected " + sysfsDir) + return false + } else if err != nil { + slog.Debug("error looking up amd driver", "path", sysfsDir, "error", err) + return false + } + return true +} + +// Prefer to use host installed ROCm, as long as it meets our minimum requirements +// failing that, tell the user how to download it on their own +func AMDValidateLibDir() (string, error) { + libDir, err := commonAMDValidateLibDir() + if err == nil { + return libDir, nil + } + + // Well known ollama installer path + installedRocmDir := "/usr/share/ollama/lib/rocm" + if rocmLibUsable(installedRocmDir) { + return installedRocmDir, nil + } + + // If we still haven't found a usable rocm, the user will have to install it on their own + slog.Warn("amdgpu detected, but no compatible rocm library found. 
Either install rocm v6, or follow manual install instructions at https://github.com/ollama/ollama/blob/main/docs/linux.md#manual-install") + return "", errors.New("no suitable rocm found, falling back to CPU") +} + +func AMDDriverVersion() (driverMajor, driverMinor int, err error) { + _, err = os.Stat(DriverVersionFile) + if err != nil { + return 0, 0, fmt.Errorf("amdgpu version file missing: %s %w", DriverVersionFile, err) + } + fp, err := os.Open(DriverVersionFile) + if err != nil { + return 0, 0, err + } + defer fp.Close() + verString, err := io.ReadAll(fp) + if err != nil { + return 0, 0, err + } + + pattern := `\A(\d+)\.(\d+).*` + regex := regexp.MustCompile(pattern) + match := regex.FindStringSubmatch(string(verString)) + if len(match) < 2 { + return 0, 0, fmt.Errorf("malformed version string %s", string(verString)) + } + driverMajor, err = strconv.Atoi(match[1]) + if err != nil { + return 0, 0, err + } + driverMinor, err = strconv.Atoi(match[2]) + if err != nil { + return 0, 0, err + } + return driverMajor, driverMinor, nil +} + +func (gpus RocmGPUInfoList) RefreshFreeMemory() error { + if len(gpus) == 0 { + return nil + } + for i := range gpus { + usedMemory, err := getFreeMemory(gpus[i].usedFilepath) + if err != nil { + return err + } + slog.Debug("updating rocm free memory", "gpu", gpus[i].ID, "name", gpus[i].Name, "before", format.HumanBytes2(gpus[i].FreeMemory), "now", format.HumanBytes2(gpus[i].TotalMemory-usedMemory)) + gpus[i].FreeMemory = gpus[i].TotalMemory - usedMemory + } + return nil +} + +func getFreeMemory(usedFile string) (uint64, error) { + buf, err := os.ReadFile(usedFile) + if err != nil { + return 0, fmt.Errorf("failed to read sysfs node %s %w", usedFile, err) + } + usedMemory, err := strconv.ParseUint(strings.TrimSpace(string(buf)), 10, 64) + if err != nil { + slog.Debug("failed to parse sysfs node", "file", usedFile, "error", err) + return 0, fmt.Errorf("failed to parse sysfs node %s %w", usedFile, err) + } + return usedMemory, nil +} + +func verifyKFDDriverAccess() error { + // Verify we have permissions - either running as root, or we have group access to the driver + fd, err := os.OpenFile("/dev/kfd", os.O_RDWR, 0o666) + if err != nil { + if errors.Is(err, fs.ErrPermission) { + return fmt.Errorf("permissions not set up properly. Either run ollama as root, or add you user account to the render group. %w", err) + } else if errors.Is(err, fs.ErrNotExist) { + // Container runtime failure? + return fmt.Errorf("kfd driver not loaded. If running in a container, remember to include '--device /dev/kfd --device /dev/dri'") + } + return fmt.Errorf("failed to check permission on /dev/kfd: %w", err) + } + fd.Close() + return nil +} + +func rocmGetVisibleDevicesEnv(gpuInfo []GpuInfo) (string, string) { + ids := []string{} + for _, info := range gpuInfo { + if info.Library != "rocm" { + // TODO shouldn't happen if things are wired correctly... + slog.Debug("rocmGetVisibleDevicesEnv skipping over non-rocm device", "library", info.Library) + continue + } + ids = append(ids, info.ID) + } + // There are 3 potential env vars to use to select GPUs. 
+ // ROCR_VISIBLE_DEVICES supports UUID or numeric so is our preferred on linux + // GPU_DEVICE_ORDINAL supports numeric IDs only + // HIP_VISIBLE_DEVICES supports numeric IDs only + return "ROCR_VISIBLE_DEVICES", strings.Join(ids, ",") +} diff --git a/discover/amd_windows.go b/discover/amd_windows.go new file mode 100644 index 0000000..0659d12 --- /dev/null +++ b/discover/amd_windows.go @@ -0,0 +1,218 @@ +package discover + +import ( + "bytes" + "errors" + "fmt" + "log/slog" + "path/filepath" + "slices" + "strconv" + "strings" + + "github.com/ollama/ollama/envconfig" + "github.com/ollama/ollama/format" +) + +const ( + + // TODO We're lookinng for this exact name to detect iGPUs since hipGetDeviceProperties never reports integrated==true + iGPUName = "AMD Radeon(TM) Graphics" +) + +var ( + // Used to validate if the given ROCm lib is usable + ROCmLibGlobs = []string{"hipblas.dll", "rocblas"} // This is not sufficient to discern v5 vs v6 + RocmStandardLocations = []string{"C:\\Program Files\\AMD\\ROCm\\6.1\\bin"} // TODO glob? +) + +// Only called once during bootstrap +func AMDGetGPUInfo() ([]RocmGPUInfo, error) { + resp := []RocmGPUInfo{} + hl, err := NewHipLib() + if err != nil { + slog.Debug(err.Error()) + return nil, err + } + defer hl.Release() + + driverMajor, driverMinor, err := hl.AMDDriverVersion() + if err != nil { + // For now this is benign, but we may eventually need to fail compatibility checks + slog.Debug("error looking up amd driver version", "error", err) + } + + // Note: the HIP library automatically handles subsetting to any *_VISIBLE_DEVICES the user specified + count := hl.HipGetDeviceCount() + if count == 0 { + err := fmt.Errorf("no compatible amdgpu devices detected") + slog.Info(err.Error()) + return nil, err + } + + libDir, err := AMDValidateLibDir() + if err != nil { + err = fmt.Errorf("unable to verify rocm library: %w", err) + slog.Warn(err.Error()) + return nil, err + } + + var supported []string + gfxOverride := envconfig.HsaOverrideGfxVersion() + if gfxOverride == "" { + supported, err = GetSupportedGFX(libDir) + if err != nil { + err = fmt.Errorf("failed to lookup supported GFX types: %w", err) + slog.Warn(err.Error()) + return nil, err + } + } else { + slog.Info("skipping rocm gfx compatibility check", "HSA_OVERRIDE_GFX_VERSION", gfxOverride) + } + + slog.Debug("detected hip devices", "count", count) + // TODO how to determine the underlying device ID when visible devices is causing this to subset? + for i := range count { + err = hl.HipSetDevice(i) + if err != nil { + slog.Warn("set device", "id", i, "error", err) + continue + } + + props, err := hl.HipGetDeviceProperties(i) + if err != nil { + slog.Warn("get properties", "id", i, "error", err) + continue + } + n := bytes.IndexByte(props.Name[:], 0) + name := string(props.Name[:n]) + // TODO is UUID actually populated on windows? + // Can luid be used on windows for setting visible devices (and is it actually set?) + n = bytes.IndexByte(props.GcnArchName[:], 0) + gfx := string(props.GcnArchName[:n]) + slog.Debug("hip device", "id", i, "name", name, "gfx", gfx) + // slog.Info(fmt.Sprintf("[%d] Integrated: %d", i, props.iGPU)) // DOESN'T REPORT CORRECTLY! Always 0 + // TODO Why isn't props.iGPU accurate!? 
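+ // Query free/total VRAM for the device selected with hipSetDevice above;
+ // hipMemGetInfo reports memory for the currently active device only.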
+ + freeMemory, totalMemory, err := hl.HipMemGetInfo() + if err != nil { + slog.Warn("get mem info", "id", i, "error", err) + continue + } + + gpuInfo := RocmGPUInfo{ + GpuInfo: GpuInfo{ + Library: "rocm", + memInfo: memInfo{ + TotalMemory: totalMemory, + FreeMemory: freeMemory, + }, + // Free memory reporting on Windows is not reliable until we bump to ROCm v6.2 + UnreliableFreeMemory: true, + + ID: strconv.Itoa(i), // TODO this is probably wrong if we specify visible devices + DependencyPath: []string{libDir}, + MinimumMemory: rocmMinimumMemory, + Name: name, + Compute: gfx, + DriverMajor: driverMajor, + DriverMinor: driverMinor, + }, + index: i, + } + + // iGPU detection, remove this check once we can support an iGPU variant of the rocm library + if strings.EqualFold(name, iGPUName) || totalMemory < IGPUMemLimit { + reason := "unsupported Radeon iGPU detected skipping" + slog.Info(reason, "id", gpuInfo.ID, "total", format.HumanBytes2(totalMemory)) + unsupportedGPUs = append(unsupportedGPUs, UnsupportedGPUInfo{ + GpuInfo: gpuInfo.GpuInfo, + Reason: reason, + }) + continue + } + + // Strip off Target Features when comparing + if !slices.Contains[[]string, string](supported, strings.Split(gfx, ":")[0]) { + reason := fmt.Sprintf("amdgpu is not supported (supported types:%s)", supported) + slog.Warn(reason, "gpu_type", gfx, "gpu", gpuInfo.ID, "library", libDir) + unsupportedGPUs = append(unsupportedGPUs, UnsupportedGPUInfo{ + GpuInfo: gpuInfo.GpuInfo, + Reason: reason, + }) + // HSA_OVERRIDE_GFX_VERSION not supported on windows + continue + } else { + slog.Debug("amdgpu is supported", "gpu", i, "gpu_type", gfx) + } + + slog.Debug("amdgpu memory", "gpu", i, "total", format.HumanBytes2(totalMemory)) + slog.Debug("amdgpu memory", "gpu", i, "available", format.HumanBytes2(freeMemory)) + + resp = append(resp, gpuInfo) + } + + return resp, nil +} + +func AMDValidateLibDir() (string, error) { + libDir, err := commonAMDValidateLibDir() + if err == nil { + return libDir, nil + } + + // Installer payload (if we're running from some other location) + rocmTargetDir := filepath.Join(LibOllamaPath, "rocm") + if rocmLibUsable(rocmTargetDir) { + slog.Debug("detected ollama installed ROCm at " + rocmTargetDir) + return rocmTargetDir, nil + } + + // Should not happen on windows since we include it in the installer, but stand-alone binary might hit this + slog.Warn("amdgpu detected, but no compatible rocm library found. Please install ROCm") + return "", errors.New("no suitable rocm found, falling back to CPU") +} + +func (gpus RocmGPUInfoList) RefreshFreeMemory() error { + if len(gpus) == 0 { + return nil + } + hl, err := NewHipLib() + if err != nil { + slog.Debug(err.Error()) + return err + } + defer hl.Release() + + for i := range gpus { + err := hl.HipSetDevice(gpus[i].index) + if err != nil { + return err + } + freeMemory, _, err := hl.HipMemGetInfo() + if err != nil { + slog.Warn("get mem info", "id", i, "error", err) + continue + } + slog.Debug("updating rocm free memory", "gpu", gpus[i].ID, "name", gpus[i].Name, "before", format.HumanBytes2(gpus[i].FreeMemory), "now", format.HumanBytes2(freeMemory)) + gpus[i].FreeMemory = freeMemory + } + return nil +} + +func rocmGetVisibleDevicesEnv(gpuInfo []GpuInfo) (string, string) { + ids := []string{} + for _, info := range gpuInfo { + if info.Library != "rocm" { + // TODO shouldn't happen if things are wired correctly... 
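+ // Skip non-rocm entries so the visible-devices value only lists ROCm-managed GPU IDs.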
+ slog.Debug("rocmGetVisibleDevicesEnv skipping over non-rocm device", "library", info.Library) + continue + } + ids = append(ids, info.ID) + } + // There are 3 potential env vars to use to select GPUs. + // ROCR_VISIBLE_DEVICES supports UUID or numeric but does not work on Windows + // HIP_VISIBLE_DEVICES supports numeric IDs only + // GPU_DEVICE_ORDINAL supports numeric IDs only + return "HIP_VISIBLE_DEVICES", strings.Join(ids, ",") +} diff --git a/discover/cpu_common.go b/discover/cpu_common.go new file mode 100644 index 0000000..2b9f729 --- /dev/null +++ b/discover/cpu_common.go @@ -0,0 +1,24 @@ +package discover + +import ( + "os" + "path/filepath" + "runtime" + "strings" +) + +func IsNUMA() bool { + if runtime.GOOS != "linux" { + // numa support in llama.cpp is linux only + return false + } + ids := map[string]any{} + packageIds, _ := filepath.Glob("/sys/devices/system/cpu/cpu*/topology/physical_package_id") + for _, packageId := range packageIds { + id, err := os.ReadFile(packageId) + if err == nil { + ids[strings.TrimSpace(string(id))] = struct{}{} + } + } + return len(ids) > 1 +} diff --git a/discover/cuda_common.go b/discover/cuda_common.go new file mode 100644 index 0000000..0482952 --- /dev/null +++ b/discover/cuda_common.go @@ -0,0 +1,65 @@ +//go:build linux || windows + +package discover + +import ( + "log/slog" + "os" + "regexp" + "runtime" + "strconv" + "strings" +) + +// Jetson devices have JETSON_JETPACK="x.y.z" factory set to the Jetpack version installed. +// Included to drive logic for reducing Ollama-allocated overhead on L4T/Jetson devices. +var CudaTegra string = os.Getenv("JETSON_JETPACK") + +func cudaGetVisibleDevicesEnv(gpuInfo []GpuInfo) (string, string) { + ids := []string{} + for _, info := range gpuInfo { + if info.Library != "cuda" { + // TODO shouldn't happen if things are wired correctly... + slog.Debug("cudaGetVisibleDevicesEnv skipping over non-cuda device", "library", info.Library) + continue + } + ids = append(ids, info.ID) + } + return "CUDA_VISIBLE_DEVICES", strings.Join(ids, ",") +} + +func cudaVariant(gpuInfo CudaGPUInfo) string { + if runtime.GOARCH == "arm64" && runtime.GOOS == "linux" { + if CudaTegra != "" { + ver := strings.Split(CudaTegra, ".") + if len(ver) > 0 { + return "jetpack" + ver[0] + } + } else if data, err := os.ReadFile("/etc/nv_tegra_release"); err == nil { + r := regexp.MustCompile(` R(\d+) `) + m := r.FindSubmatch(data) + if len(m) != 2 { + slog.Info("Unexpected format for /etc/nv_tegra_release. 
Set JETSON_JETPACK to select version") + } else { + if l4t, err := strconv.Atoi(string(m[1])); err == nil { + // Note: mapping from L4t -> JP is inconsistent (can't just subtract 30) + // https://developer.nvidia.com/embedded/jetpack-archive + switch l4t { + case 35: + return "jetpack5" + case 36: + return "jetpack6" + default: + slog.Info("unsupported L4T version", "nv_tegra_release", string(data)) + } + } + } + } + } + + // driver 12.0 has problems with the cuda v12 library, so run v11 on those older drivers + if gpuInfo.DriverMajor < 12 || (gpuInfo.DriverMajor == 12 && gpuInfo.DriverMinor == 0) { + return "v11" + } + return "v12" +} diff --git a/discover/gpu.go b/discover/gpu.go new file mode 100644 index 0000000..15bad44 --- /dev/null +++ b/discover/gpu.go @@ -0,0 +1,718 @@ +//go:build linux || windows + +package discover + +/* +#cgo linux LDFLAGS: -lrt -lpthread -ldl -lstdc++ -lm +#cgo windows LDFLAGS: -lpthread + +#include "gpu_info.h" +*/ +import "C" + +import ( + "fmt" + "log/slog" + "os" + "path/filepath" + "runtime" + "strconv" + "strings" + "sync" + "unsafe" + + "github.com/ollama/ollama/envconfig" + "github.com/ollama/ollama/format" +) + +type cudaHandles struct { + deviceCount int + cudart *C.cudart_handle_t + nvcuda *C.nvcuda_handle_t + nvml *C.nvml_handle_t +} + +type oneapiHandles struct { + oneapi *C.oneapi_handle_t + deviceCount int +} + +const ( + cudaMinimumMemory = 457 * format.MebiByte + rocmMinimumMemory = 457 * format.MebiByte + // TODO OneAPI minimum memory +) + +var ( + gpuMutex sync.Mutex + bootstrapped bool + cpus []CPUInfo + cudaGPUs []CudaGPUInfo + nvcudaLibPath string + cudartLibPath string + oneapiLibPath string + nvmlLibPath string + rocmGPUs []RocmGPUInfo + oneapiGPUs []OneapiGPUInfo + + // If any discovered GPUs are incompatible, report why + unsupportedGPUs []UnsupportedGPUInfo + + // Keep track of errors during bootstrapping so that if GPUs are missing + // they expected to be present this may explain why + bootstrapErrors []error +) + +// With our current CUDA compile flags, older than 5.0 will not work properly +// (string values used to allow ldflags overrides at build time) +var ( + CudaComputeMajorMin = "5" + CudaComputeMinorMin = "0" +) + +var RocmComputeMajorMin = "9" + +// TODO find a better way to detect iGPU instead of minimum memory +const IGPUMemLimit = 1 * format.GibiByte // 512G is what they typically report, so anything less than 1G must be iGPU + +// Note: gpuMutex must already be held +func initCudaHandles() *cudaHandles { + // TODO - if the ollama build is CPU only, don't do these checks as they're irrelevant and confusing + + cHandles := &cudaHandles{} + // Short Circuit if we already know which library to use + // ignore bootstrap errors in this case since we already recorded them + if nvmlLibPath != "" { + cHandles.nvml, _, _ = loadNVMLMgmt([]string{nvmlLibPath}) + return cHandles + } + if nvcudaLibPath != "" { + cHandles.deviceCount, cHandles.nvcuda, _, _ = loadNVCUDAMgmt([]string{nvcudaLibPath}) + return cHandles + } + if cudartLibPath != "" { + cHandles.deviceCount, cHandles.cudart, _, _ = loadCUDARTMgmt([]string{cudartLibPath}) + return cHandles + } + + slog.Debug("searching for GPU discovery libraries for NVIDIA") + var cudartMgmtPatterns []string + + // Aligned with driver, we can't carry as payloads + nvcudaMgmtPatterns := NvcudaGlobs + cudartMgmtPatterns = append(cudartMgmtPatterns, filepath.Join(LibOllamaPath, "cuda_v*", CudartMgmtName)) + cudartMgmtPatterns = append(cudartMgmtPatterns, CudartGlobs...) 
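+ // Probe in order: NVML (management library, used for free-memory queries),
+ // then the CUDA driver library (nvcuda), then the CUDA runtime (cudart).
+ // The first driver/runtime library that loads wins and its path is cached
+ // for later calls via the short-circuit checks at the top of this function.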
+ + if len(NvmlGlobs) > 0 { + nvmlLibPaths := FindGPULibs(NvmlMgmtName, NvmlGlobs) + if len(nvmlLibPaths) > 0 { + nvml, libPath, err := loadNVMLMgmt(nvmlLibPaths) + if nvml != nil { + slog.Debug("nvidia-ml loaded", "library", libPath) + cHandles.nvml = nvml + nvmlLibPath = libPath + } + if err != nil { + bootstrapErrors = append(bootstrapErrors, err) + } + } + } + + nvcudaLibPaths := FindGPULibs(NvcudaMgmtName, nvcudaMgmtPatterns) + if len(nvcudaLibPaths) > 0 { + deviceCount, nvcuda, libPath, err := loadNVCUDAMgmt(nvcudaLibPaths) + if nvcuda != nil { + slog.Debug("detected GPUs", "count", deviceCount, "library", libPath) + cHandles.nvcuda = nvcuda + cHandles.deviceCount = deviceCount + nvcudaLibPath = libPath + return cHandles + } + if err != nil { + bootstrapErrors = append(bootstrapErrors, err) + } + } + + cudartLibPaths := FindGPULibs(CudartMgmtName, cudartMgmtPatterns) + if len(cudartLibPaths) > 0 { + deviceCount, cudart, libPath, err := loadCUDARTMgmt(cudartLibPaths) + if cudart != nil { + slog.Debug("detected GPUs", "library", libPath, "count", deviceCount) + cHandles.cudart = cudart + cHandles.deviceCount = deviceCount + cudartLibPath = libPath + return cHandles + } + if err != nil { + bootstrapErrors = append(bootstrapErrors, err) + } + } + + return cHandles +} + +// Note: gpuMutex must already be held +func initOneAPIHandles() *oneapiHandles { + oHandles := &oneapiHandles{} + + // Short Circuit if we already know which library to use + // ignore bootstrap errors in this case since we already recorded them + if oneapiLibPath != "" { + oHandles.deviceCount, oHandles.oneapi, _, _ = loadOneapiMgmt([]string{oneapiLibPath}) + return oHandles + } + + oneapiLibPaths := FindGPULibs(OneapiMgmtName, OneapiGlobs) + if len(oneapiLibPaths) > 0 { + var err error + oHandles.deviceCount, oHandles.oneapi, oneapiLibPath, err = loadOneapiMgmt(oneapiLibPaths) + if err != nil { + bootstrapErrors = append(bootstrapErrors, err) + } + } + + return oHandles +} + +func GetCPUInfo() GpuInfoList { + gpuMutex.Lock() + if !bootstrapped { + gpuMutex.Unlock() + GetGPUInfo() + } else { + gpuMutex.Unlock() + } + return GpuInfoList{cpus[0].GpuInfo} +} + +func GetGPUInfo() GpuInfoList { + // TODO - consider exploring lspci (and equivalent on windows) to check for + // GPUs so we can report warnings if we see Nvidia/AMD but fail to load the libraries + gpuMutex.Lock() + defer gpuMutex.Unlock() + needRefresh := true + var cHandles *cudaHandles + var oHandles *oneapiHandles + defer func() { + if cHandles != nil { + if cHandles.cudart != nil { + C.cudart_release(*cHandles.cudart) + } + if cHandles.nvcuda != nil { + C.nvcuda_release(*cHandles.nvcuda) + } + if cHandles.nvml != nil { + C.nvml_release(*cHandles.nvml) + } + } + if oHandles != nil { + if oHandles.oneapi != nil { + // TODO - is this needed? 
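+ // Free the oneAPI handle alongside the cudart/nvcuda/nvml releases above so
+ // no C-side handles leak across GetGPUInfo calls.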
+ C.oneapi_release(*oHandles.oneapi) + } + } + }() + + if !bootstrapped { + slog.Info("looking for compatible GPUs") + cudaComputeMajorMin, err := strconv.Atoi(CudaComputeMajorMin) + if err != nil { + slog.Error("invalid CudaComputeMajorMin setting", "value", CudaComputeMajorMin, "error", err) + } + cudaComputeMinorMin, err := strconv.Atoi(CudaComputeMinorMin) + if err != nil { + slog.Error("invalid CudaComputeMinorMin setting", "value", CudaComputeMinorMin, "error", err) + } + bootstrapErrors = []error{} + needRefresh = false + var memInfo C.mem_info_t + + mem, err := GetCPUMem() + if err != nil { + slog.Warn("error looking up system memory", "error", err) + } + + details, err := GetCPUDetails() + if err != nil { + slog.Warn("failed to lookup CPU details", "error", err) + } + cpus = []CPUInfo{ + { + GpuInfo: GpuInfo{ + memInfo: mem, + Library: "cpu", + ID: "0", + }, + CPUs: details, + }, + } + + // Load ALL libraries + cHandles = initCudaHandles() + + // NVIDIA + for i := range cHandles.deviceCount { + if cHandles.cudart != nil || cHandles.nvcuda != nil { + gpuInfo := CudaGPUInfo{ + GpuInfo: GpuInfo{ + Library: "cuda", + }, + index: i, + } + var driverMajor int + var driverMinor int + if cHandles.cudart != nil { + C.cudart_bootstrap(*cHandles.cudart, C.int(i), &memInfo) + } else { + C.nvcuda_bootstrap(*cHandles.nvcuda, C.int(i), &memInfo) + driverMajor = int(cHandles.nvcuda.driver_major) + driverMinor = int(cHandles.nvcuda.driver_minor) + } + if memInfo.err != nil { + slog.Info("error looking up nvidia GPU memory", "error", C.GoString(memInfo.err)) + C.free(unsafe.Pointer(memInfo.err)) + continue + } + gpuInfo.TotalMemory = uint64(memInfo.total) + gpuInfo.FreeMemory = uint64(memInfo.free) + gpuInfo.ID = C.GoString(&memInfo.gpu_id[0]) + gpuInfo.Compute = fmt.Sprintf("%d.%d", memInfo.major, memInfo.minor) + gpuInfo.computeMajor = int(memInfo.major) + gpuInfo.computeMinor = int(memInfo.minor) + gpuInfo.MinimumMemory = cudaMinimumMemory + gpuInfo.DriverMajor = driverMajor + gpuInfo.DriverMinor = driverMinor + variant := cudaVariant(gpuInfo) + + // Start with our bundled libraries + if variant != "" { + variantPath := filepath.Join(LibOllamaPath, "cuda_"+variant) + if _, err := os.Stat(variantPath); err == nil { + // Put the variant directory first in the search path to avoid runtime linking to the wrong library + gpuInfo.DependencyPath = append([]string{variantPath}, gpuInfo.DependencyPath...) + } + } + gpuInfo.Name = C.GoString(&memInfo.gpu_name[0]) + gpuInfo.Variant = variant + + if int(memInfo.major) < cudaComputeMajorMin || (int(memInfo.major) == cudaComputeMajorMin && int(memInfo.minor) < cudaComputeMinorMin) { + unsupportedGPUs = append(unsupportedGPUs, + UnsupportedGPUInfo{ + GpuInfo: gpuInfo.GpuInfo, + }) + slog.Info(fmt.Sprintf("[%d] CUDA GPU is too old. 
Compute Capability detected: %d.%d", i, memInfo.major, memInfo.minor)) + continue + } + + // query the management library as well so we can record any skew between the two + // which represents overhead on the GPU we must set aside on subsequent updates + if cHandles.nvml != nil { + uuid := C.CString(gpuInfo.ID) + defer C.free(unsafe.Pointer(uuid)) + C.nvml_get_free(*cHandles.nvml, uuid, &memInfo.free, &memInfo.total, &memInfo.used) + if memInfo.err != nil { + slog.Warn("error looking up nvidia GPU memory", "error", C.GoString(memInfo.err)) + C.free(unsafe.Pointer(memInfo.err)) + } else { + if memInfo.free != 0 && uint64(memInfo.free) > gpuInfo.FreeMemory { + gpuInfo.OSOverhead = uint64(memInfo.free) - gpuInfo.FreeMemory + slog.Info("detected OS VRAM overhead", + "id", gpuInfo.ID, + "library", gpuInfo.Library, + "compute", gpuInfo.Compute, + "driver", fmt.Sprintf("%d.%d", gpuInfo.DriverMajor, gpuInfo.DriverMinor), + "name", gpuInfo.Name, + "overhead", format.HumanBytes2(gpuInfo.OSOverhead), + ) + } + } + } + + // TODO potentially sort on our own algorithm instead of what the underlying GPU library does... + cudaGPUs = append(cudaGPUs, gpuInfo) + } + } + + // Intel + if envconfig.IntelGPU() { + oHandles = initOneAPIHandles() + if oHandles != nil && oHandles.oneapi != nil { + for d := range oHandles.oneapi.num_drivers { + if oHandles.oneapi == nil { + // shouldn't happen + slog.Warn("nil oneapi handle with driver count", "count", int(oHandles.oneapi.num_drivers)) + continue + } + devCount := C.oneapi_get_device_count(*oHandles.oneapi, C.int(d)) + for i := range devCount { + gpuInfo := OneapiGPUInfo{ + GpuInfo: GpuInfo{ + Library: "oneapi", + }, + driverIndex: int(d), + gpuIndex: int(i), + } + // TODO - split bootstrapping from updating free memory + C.oneapi_check_vram(*oHandles.oneapi, C.int(d), i, &memInfo) + // TODO - convert this to MinimumMemory based on testing... + var totalFreeMem float64 = float64(memInfo.free) * 0.95 // work-around: leave some reserve vram for mkl lib used in ggml-sycl backend. 
+ memInfo.free = C.uint64_t(totalFreeMem) + gpuInfo.TotalMemory = uint64(memInfo.total) + gpuInfo.FreeMemory = uint64(memInfo.free) + gpuInfo.ID = C.GoString(&memInfo.gpu_id[0]) + gpuInfo.Name = C.GoString(&memInfo.gpu_name[0]) + gpuInfo.DependencyPath = []string{LibOllamaPath} + oneapiGPUs = append(oneapiGPUs, gpuInfo) + } + } + } + } + + rocmGPUs, err = AMDGetGPUInfo() + if err != nil { + bootstrapErrors = append(bootstrapErrors, err) + } + bootstrapped = true + if len(cudaGPUs) == 0 && len(rocmGPUs) == 0 && len(oneapiGPUs) == 0 { + slog.Info("no compatible GPUs were discovered") + } + + // TODO verify we have runners for the discovered GPUs, filter out any that aren't supported with good error messages + } + + // For detected GPUs, load library if not loaded + + // Refresh free memory usage + if needRefresh { + mem, err := GetCPUMem() + if err != nil { + slog.Warn("error looking up system memory", "error", err) + } else { + slog.Debug("updating system memory data", + slog.Group( + "before", + "total", format.HumanBytes2(cpus[0].TotalMemory), + "free", format.HumanBytes2(cpus[0].FreeMemory), + "free_swap", format.HumanBytes2(cpus[0].FreeSwap), + ), + slog.Group( + "now", + "total", format.HumanBytes2(mem.TotalMemory), + "free", format.HumanBytes2(mem.FreeMemory), + "free_swap", format.HumanBytes2(mem.FreeSwap), + ), + ) + cpus[0].FreeMemory = mem.FreeMemory + cpus[0].FreeSwap = mem.FreeSwap + } + + var memInfo C.mem_info_t + if cHandles == nil && len(cudaGPUs) > 0 { + cHandles = initCudaHandles() + } + for i, gpu := range cudaGPUs { + if cHandles.nvml != nil { + uuid := C.CString(gpu.ID) + defer C.free(unsafe.Pointer(uuid)) + C.nvml_get_free(*cHandles.nvml, uuid, &memInfo.free, &memInfo.total, &memInfo.used) + } else if cHandles.cudart != nil { + C.cudart_bootstrap(*cHandles.cudart, C.int(gpu.index), &memInfo) + } else if cHandles.nvcuda != nil { + C.nvcuda_get_free(*cHandles.nvcuda, C.int(gpu.index), &memInfo.free, &memInfo.total) + memInfo.used = memInfo.total - memInfo.free + } else { + // shouldn't happen + slog.Warn("no valid cuda library loaded to refresh vram usage") + break + } + if memInfo.err != nil { + slog.Warn("error looking up nvidia GPU memory", "error", C.GoString(memInfo.err)) + C.free(unsafe.Pointer(memInfo.err)) + continue + } + if memInfo.free == 0 { + slog.Warn("error looking up nvidia GPU memory") + continue + } + if cHandles.nvml != nil && gpu.OSOverhead > 0 { + // When using the management library update based on recorded overhead + memInfo.free -= C.uint64_t(gpu.OSOverhead) + } + slog.Debug("updating cuda memory data", + "gpu", gpu.ID, + "name", gpu.Name, + "overhead", format.HumanBytes2(gpu.OSOverhead), + slog.Group( + "before", + "total", format.HumanBytes2(gpu.TotalMemory), + "free", format.HumanBytes2(gpu.FreeMemory), + ), + slog.Group( + "now", + "total", format.HumanBytes2(uint64(memInfo.total)), + "free", format.HumanBytes2(uint64(memInfo.free)), + "used", format.HumanBytes2(uint64(memInfo.used)), + ), + ) + cudaGPUs[i].FreeMemory = uint64(memInfo.free) + } + + if oHandles == nil && len(oneapiGPUs) > 0 { + oHandles = initOneAPIHandles() + } + for i, gpu := range oneapiGPUs { + if oHandles.oneapi == nil { + // shouldn't happen + slog.Warn("nil oneapi handle with device count", "count", oHandles.deviceCount) + continue + } + C.oneapi_check_vram(*oHandles.oneapi, C.int(gpu.driverIndex), C.int(gpu.gpuIndex), &memInfo) + // TODO - convert this to MinimumMemory based on testing... 
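+ // Same mkl/ggml-sycl reserve as the bootstrap path above, so refreshed
+ // free-memory values stay consistent with initial discovery.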
+ var totalFreeMem float64 = float64(memInfo.free) * 0.95 // work-around: leave some reserve vram for mkl lib used in ggml-sycl backend. + memInfo.free = C.uint64_t(totalFreeMem) + oneapiGPUs[i].FreeMemory = uint64(memInfo.free) + } + + err = RocmGPUInfoList(rocmGPUs).RefreshFreeMemory() + if err != nil { + slog.Debug("problem refreshing ROCm free memory", "error", err) + } + } + + resp := []GpuInfo{} + for _, gpu := range cudaGPUs { + resp = append(resp, gpu.GpuInfo) + } + for _, gpu := range rocmGPUs { + resp = append(resp, gpu.GpuInfo) + } + for _, gpu := range oneapiGPUs { + resp = append(resp, gpu.GpuInfo) + } + if len(resp) == 0 { + resp = append(resp, cpus[0].GpuInfo) + } + return resp +} + +func FindGPULibs(baseLibName string, defaultPatterns []string) []string { + // Multiple GPU libraries may exist, and some may not work, so keep trying until we exhaust them + gpuLibPaths := []string{} + slog.Debug("Searching for GPU library", "name", baseLibName) + + // search our bundled libraries first + patterns := []string{filepath.Join(LibOllamaPath, baseLibName)} + + var ldPaths []string + switch runtime.GOOS { + case "windows": + ldPaths = strings.Split(os.Getenv("PATH"), string(os.PathListSeparator)) + case "linux": + ldPaths = strings.Split(os.Getenv("LD_LIBRARY_PATH"), string(os.PathListSeparator)) + } + + // then search the system's LD_LIBRARY_PATH + for _, p := range ldPaths { + p, err := filepath.Abs(p) + if err != nil { + continue + } + patterns = append(patterns, filepath.Join(p, baseLibName)) + } + + // finally, search the default patterns provided by the caller + patterns = append(patterns, defaultPatterns...) + slog.Debug("gpu library search", "globs", patterns) + for _, pattern := range patterns { + // Nvidia PhysX known to return bogus results + if strings.Contains(pattern, "PhysX") { + slog.Debug("skipping PhysX cuda library path", "path", pattern) + continue + } + // Ignore glob discovery errors + matches, _ := filepath.Glob(pattern) + for _, match := range matches { + // Resolve any links so we don't try the same lib multiple times + // and weed out any dups across globs + libPath := match + tmp := match + var err error + for ; err == nil; tmp, err = os.Readlink(libPath) { + if !filepath.IsAbs(tmp) { + tmp = filepath.Join(filepath.Dir(libPath), tmp) + } + libPath = tmp + } + new := true + for _, cmp := range gpuLibPaths { + if cmp == libPath { + new = false + break + } + } + if new { + gpuLibPaths = append(gpuLibPaths, libPath) + } + } + } + slog.Debug("discovered GPU libraries", "paths", gpuLibPaths) + return gpuLibPaths +} + +// Bootstrap the runtime library +// Returns: num devices, handle, libPath, error +func loadCUDARTMgmt(cudartLibPaths []string) (int, *C.cudart_handle_t, string, error) { + var resp C.cudart_init_resp_t + resp.ch.verbose = getVerboseState() + var err error + for _, libPath := range cudartLibPaths { + lib := C.CString(libPath) + defer C.free(unsafe.Pointer(lib)) + C.cudart_init(lib, &resp) + if resp.err != nil { + err = fmt.Errorf("Unable to load cudart library %s: %s", libPath, C.GoString(resp.err)) + slog.Debug(err.Error()) + C.free(unsafe.Pointer(resp.err)) + } else { + err = nil + return int(resp.num_devices), &resp.ch, libPath, err + } + } + return 0, nil, "", err +} + +// Bootstrap the driver library +// Returns: num devices, handle, libPath, error +func loadNVCUDAMgmt(nvcudaLibPaths []string) (int, *C.nvcuda_handle_t, string, error) { + var resp C.nvcuda_init_resp_t + resp.ch.verbose = getVerboseState() + var err error + for _, libPath := 
range nvcudaLibPaths { + lib := C.CString(libPath) + defer C.free(unsafe.Pointer(lib)) + C.nvcuda_init(lib, &resp) + if resp.err != nil { + // Decide what log level based on the type of error message to help users understand why + switch resp.cudaErr { + case C.CUDA_ERROR_INSUFFICIENT_DRIVER, C.CUDA_ERROR_SYSTEM_DRIVER_MISMATCH: + err = fmt.Errorf("version mismatch between driver and cuda driver library - reboot or upgrade may be required: library %s", libPath) + slog.Warn(err.Error()) + case C.CUDA_ERROR_NO_DEVICE: + err = fmt.Errorf("no nvidia devices detected by library %s", libPath) + slog.Info(err.Error()) + case C.CUDA_ERROR_UNKNOWN: + err = fmt.Errorf("unknown error initializing cuda driver library %s: %s. see https://github.com/ollama/ollama/blob/main/docs/troubleshooting.md for more information", libPath, C.GoString(resp.err)) + slog.Warn(err.Error()) + default: + msg := C.GoString(resp.err) + if strings.Contains(msg, "wrong ELF class") { + slog.Debug("skipping 32bit library", "library", libPath) + } else { + err = fmt.Errorf("Unable to load cudart library %s: %s", libPath, C.GoString(resp.err)) + slog.Info(err.Error()) + } + } + C.free(unsafe.Pointer(resp.err)) + } else { + err = nil + return int(resp.num_devices), &resp.ch, libPath, err + } + } + return 0, nil, "", err +} + +// Bootstrap the management library +// Returns: handle, libPath, error +func loadNVMLMgmt(nvmlLibPaths []string) (*C.nvml_handle_t, string, error) { + var resp C.nvml_init_resp_t + resp.ch.verbose = getVerboseState() + var err error + for _, libPath := range nvmlLibPaths { + lib := C.CString(libPath) + defer C.free(unsafe.Pointer(lib)) + C.nvml_init(lib, &resp) + if resp.err != nil { + err = fmt.Errorf("Unable to load NVML management library %s: %s", libPath, C.GoString(resp.err)) + slog.Info(err.Error()) + C.free(unsafe.Pointer(resp.err)) + } else { + err = nil + return &resp.ch, libPath, err + } + } + return nil, "", err +} + +// bootstrap the Intel GPU library +// Returns: num devices, handle, libPath, error +func loadOneapiMgmt(oneapiLibPaths []string) (int, *C.oneapi_handle_t, string, error) { + var resp C.oneapi_init_resp_t + num_devices := 0 + resp.oh.verbose = getVerboseState() + var err error + for _, libPath := range oneapiLibPaths { + lib := C.CString(libPath) + defer C.free(unsafe.Pointer(lib)) + C.oneapi_init(lib, &resp) + if resp.err != nil { + err = fmt.Errorf("Unable to load oneAPI management library %s: %s", libPath, C.GoString(resp.err)) + slog.Debug(err.Error()) + C.free(unsafe.Pointer(resp.err)) + } else { + err = nil + for i := range resp.oh.num_drivers { + num_devices += int(C.oneapi_get_device_count(resp.oh, C.int(i))) + } + return num_devices, &resp.oh, libPath, err + } + } + return 0, nil, "", err +} + +func getVerboseState() C.uint16_t { + if envconfig.LogLevel() < slog.LevelInfo { + return C.uint16_t(1) + } + return C.uint16_t(0) +} + +// Given the list of GPUs this instantiation is targeted for, +// figure out the visible devices environment variable +// +// If different libraries are detected, the first one is what we use +func (l GpuInfoList) GetVisibleDevicesEnv() (string, string) { + if len(l) == 0 { + return "", "" + } + switch l[0].Library { + case "cuda": + return cudaGetVisibleDevicesEnv(l) + case "rocm": + return rocmGetVisibleDevicesEnv(l) + case "oneapi": + return oneapiGetVisibleDevicesEnv(l) + default: + slog.Debug("no filter required for library " + l[0].Library) + return "", "" + } +} + +func GetSystemInfo() SystemInfo { + gpus := GetGPUInfo() + gpuMutex.Lock() + 
defer gpuMutex.Unlock() + discoveryErrors := []string{} + for _, err := range bootstrapErrors { + discoveryErrors = append(discoveryErrors, err.Error()) + } + if len(gpus) == 1 && gpus[0].Library == "cpu" { + gpus = []GpuInfo{} + } + + return SystemInfo{ + System: cpus[0], + GPUs: gpus, + UnsupportedGPUs: unsupportedGPUs, + DiscoveryErrors: discoveryErrors, + } +} diff --git a/discover/gpu_darwin.go b/discover/gpu_darwin.go new file mode 100644 index 0000000..dd5bf6e --- /dev/null +++ b/discover/gpu_darwin.go @@ -0,0 +1,99 @@ +//go:build darwin + +package discover + +/* +#cgo CFLAGS: -x objective-c +#cgo LDFLAGS: -framework Foundation -framework CoreGraphics -framework Metal +#include "gpu_info_darwin.h" +*/ +import "C" + +import ( + "log/slog" + "runtime" + "syscall" + + "github.com/ollama/ollama/format" +) + +const ( + metalMinimumMemory = 512 * format.MebiByte +) + +func GetGPUInfo() GpuInfoList { + mem, _ := GetCPUMem() + if runtime.GOARCH == "amd64" { + return []GpuInfo{ + { + Library: "cpu", + memInfo: mem, + }, + } + } + info := GpuInfo{ + Library: "metal", + ID: "0", + } + info.TotalMemory = uint64(C.getRecommendedMaxVRAM()) + + // TODO is there a way to gather actual allocated video memory? (currentAllocatedSize doesn't work) + info.FreeMemory = info.TotalMemory + + info.MinimumMemory = metalMinimumMemory + return []GpuInfo{info} +} + +func GetCPUInfo() GpuInfoList { + mem, _ := GetCPUMem() + return []GpuInfo{ + { + Library: "cpu", + memInfo: mem, + }, + } +} + +func GetCPUMem() (memInfo, error) { + return memInfo{ + TotalMemory: uint64(C.getPhysicalMemory()), + FreeMemory: uint64(C.getFreeMemory()), + // FreeSwap omitted as Darwin uses dynamic paging + }, nil +} + +func (l GpuInfoList) GetVisibleDevicesEnv() (string, string) { + // No-op on darwin + return "", "" +} + +func GetSystemInfo() SystemInfo { + mem, _ := GetCPUMem() + query := "hw.perflevel0.physicalcpu" + perfCores, err := syscall.SysctlUint32(query) + if err != nil { + slog.Warn("failed to discover physical CPU details", "query", query, "error", err) + } + query = "hw.perflevel1.physicalcpu" + efficiencyCores, _ := syscall.SysctlUint32(query) // On x86 xeon this wont return data + + // Determine thread count + query = "hw.logicalcpu" + logicalCores, _ := syscall.SysctlUint32(query) + + return SystemInfo{ + System: CPUInfo{ + GpuInfo: GpuInfo{ + memInfo: mem, + }, + CPUs: []CPU{ + { + CoreCount: int(perfCores + efficiencyCores), + EfficiencyCoreCount: int(efficiencyCores), + ThreadCount: int(logicalCores), + }, + }, + }, + GPUs: GetGPUInfo(), + } +} diff --git a/discover/gpu_info.h b/discover/gpu_info.h new file mode 100644 index 0000000..ee7ff4c --- /dev/null +++ b/discover/gpu_info.h @@ -0,0 +1,72 @@ +#ifndef __APPLE__ +#ifndef __GPU_INFO_H__ +#define __GPU_INFO_H__ +#include +#include +#include + +#ifndef _WIN32 +#include +#define LOAD_LIBRARY(lib, flags) dlopen(lib, flags) +#define LOAD_SYMBOL(handle, sym) dlsym(handle, sym) +#define LOAD_ERR() strdup(dlerror()) +#define UNLOAD_LIBRARY(handle) dlclose(handle) +#else +#include +#define LOAD_LIBRARY(lib, flags) LoadLibrary(lib) +#define LOAD_SYMBOL(handle, sym) GetProcAddress(handle, sym) +#define UNLOAD_LIBRARY(handle) FreeLibrary(handle) +#define LOAD_ERR() ({\ + LPSTR messageBuffer = NULL; \ + size_t size = FormatMessageA(FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS, \ + NULL, GetLastError(), MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), (LPSTR)&messageBuffer, 0, NULL); \ + char *resp = strdup(messageBuffer); \ + 
LocalFree(messageBuffer); \ + resp; \ +}) + +#endif + +#ifndef LOG +#define LOG(verbose, ...) \ + do { \ + if (verbose) { \ + fprintf(stderr, __VA_ARGS__); \ + } \ + } while (0) +#endif + +#ifdef __cplusplus +extern "C" { +#endif + +#define GPU_ID_LEN 64 +#define GPU_NAME_LEN 96 + +typedef struct mem_info { + char *err; // If non-nill, caller responsible for freeing + char gpu_id[GPU_ID_LEN]; + char gpu_name[GPU_NAME_LEN]; + uint64_t total; + uint64_t free; + uint64_t used; + + // Compute Capability + int major; + int minor; + int patch; +} mem_info_t; + +void cpu_check_ram(mem_info_t *resp); + +#ifdef __cplusplus +} +#endif + +#include "gpu_info_cudart.h" +#include "gpu_info_nvcuda.h" +#include "gpu_info_nvml.h" +#include "gpu_info_oneapi.h" + +#endif // __GPU_INFO_H__ +#endif // __APPLE__ diff --git a/discover/gpu_info_cudart.c b/discover/gpu_info_cudart.c new file mode 100644 index 0000000..bc5115b --- /dev/null +++ b/discover/gpu_info_cudart.c @@ -0,0 +1,184 @@ +#ifndef __APPLE__ // TODO - maybe consider nvidia support on intel macs? + +#include +#include +#include "gpu_info_cudart.h" + +void cudart_init(char *cudart_lib_path, cudart_init_resp_t *resp) { + cudartReturn_t ret; + resp->err = NULL; + resp->num_devices = 0; + const int buflen = 256; + char buf[buflen + 1]; + int i; + + struct lookup { + char *s; + void **p; + } l[] = { + {"cudaSetDevice", (void *)&resp->ch.cudaSetDevice}, + {"cudaDeviceSynchronize", (void *)&resp->ch.cudaDeviceSynchronize}, + {"cudaDeviceReset", (void *)&resp->ch.cudaDeviceReset}, + {"cudaMemGetInfo", (void *)&resp->ch.cudaMemGetInfo}, + {"cudaGetDeviceCount", (void *)&resp->ch.cudaGetDeviceCount}, + {"cudaDeviceGetAttribute", (void *)&resp->ch.cudaDeviceGetAttribute}, + {"cudaDriverGetVersion", (void *)&resp->ch.cudaDriverGetVersion}, + {"cudaGetDeviceProperties", (void *)&resp->ch.cudaGetDeviceProperties}, + {NULL, NULL}, + }; + + resp->ch.handle = LOAD_LIBRARY(cudart_lib_path, RTLD_LAZY); + if (!resp->ch.handle) { + char *msg = LOAD_ERR(); + LOG(resp->ch.verbose, "library %s load err: %s\n", cudart_lib_path, msg); + snprintf(buf, buflen, + "Unable to load %s library to query for Nvidia GPUs: %s", + cudart_lib_path, msg); + free(msg); + resp->err = strdup(buf); + return; + } + + for (i = 0; l[i].s != NULL; i++) { + *l[i].p = LOAD_SYMBOL(resp->ch.handle, l[i].s); + if (!*(l[i].p)) { + char *msg = LOAD_ERR(); + LOG(resp->ch.verbose, "dlerr: %s\n", msg); + UNLOAD_LIBRARY(resp->ch.handle); + resp->ch.handle = NULL; + snprintf(buf, buflen, "symbol lookup for %s failed: %s", l[i].s, + msg); + free(msg); + resp->err = strdup(buf); + return; + } + } + + ret = (*resp->ch.cudaSetDevice)(0); + if (ret != CUDART_SUCCESS) { + LOG(resp->ch.verbose, "cudaSetDevice err: %d\n", ret); + UNLOAD_LIBRARY(resp->ch.handle); + resp->ch.handle = NULL; + if (ret == CUDART_ERROR_INSUFFICIENT_DRIVER) { + resp->err = strdup("your nvidia driver is too old or missing. 
If you have a CUDA GPU please upgrade to run ollama"); + return; + } + snprintf(buf, buflen, "cudart init failure: %d", ret); + resp->err = strdup(buf); + return; + } + + int version = 0; + cudartDriverVersion_t driverVersion; + driverVersion.major = 0; + driverVersion.minor = 0; + + // Report driver version if we're in verbose mode, ignore errors + ret = (*resp->ch.cudaDriverGetVersion)(&version); + if (ret != CUDART_SUCCESS) { + LOG(resp->ch.verbose, "cudaDriverGetVersion failed: %d\n", ret); + } else { + driverVersion.major = version / 1000; + driverVersion.minor = (version - (driverVersion.major * 1000)) / 10; + LOG(resp->ch.verbose, "CUDA driver version: %d-%d\n", driverVersion.major, driverVersion.minor); + } + + ret = (*resp->ch.cudaGetDeviceCount)(&resp->num_devices); + if (ret != CUDART_SUCCESS) { + LOG(resp->ch.verbose, "cudaGetDeviceCount err: %d\n", ret); + UNLOAD_LIBRARY(resp->ch.handle); + resp->ch.handle = NULL; + snprintf(buf, buflen, "unable to get device count: %d", ret); + resp->err = strdup(buf); + return; + } +} + + +void cudart_bootstrap(cudart_handle_t h, int i, mem_info_t *resp) { + resp->err = NULL; + cudartMemory_t memInfo = {0,0,0}; + cudartReturn_t ret; + const int buflen = 256; + char buf[buflen + 1]; + + if (h.handle == NULL) { + resp->err = strdup("cudart handle isn't initialized"); + return; + } + + ret = (*h.cudaSetDevice)(i); + if (ret != CUDART_SUCCESS) { + snprintf(buf, buflen, "cudart device failed to initialize"); + resp->err = strdup(buf); + return; + } + + cudaDeviceProp_t props; + ret = (*h.cudaGetDeviceProperties)(&props, i); + if (ret != CUDART_SUCCESS) { + LOG(h.verbose, "[%d] device properties lookup failure: %d\n", i, ret); + snprintf(&resp->gpu_id[0], GPU_ID_LEN, "%d", i); + resp->major = 0; + resp->minor = 0; + } else { + int allNull = 1; + for (int j = 0; j < 16; j++) { + if (props.uuid.bytes[j] != 0) { + allNull = 0; + break; + } + } + if (allNull != 0) { + snprintf(&resp->gpu_id[0], GPU_ID_LEN, "%d", i); + } else { + // GPU-d110a105-ac29-1d54-7b49-9c90440f215b + snprintf(&resp->gpu_id[0], GPU_ID_LEN, + "GPU-%02x%02x%02x%02x-%02x%02x-%02x%02x-%02x%02x-%02x%02x%02x%02x%02x%02x", + props.uuid.bytes[0], + props.uuid.bytes[1], + props.uuid.bytes[2], + props.uuid.bytes[3], + props.uuid.bytes[4], + props.uuid.bytes[5], + props.uuid.bytes[6], + props.uuid.bytes[7], + props.uuid.bytes[8], + props.uuid.bytes[9], + props.uuid.bytes[10], + props.uuid.bytes[11], + props.uuid.bytes[12], + props.uuid.bytes[13], + props.uuid.bytes[14], + props.uuid.bytes[15] + ); + } + resp->major = props.major; + resp->minor = props.minor; + + // TODO add other useful properties from props + } + ret = (*h.cudaMemGetInfo)(&memInfo.free, &memInfo.total); + if (ret != CUDART_SUCCESS) { + snprintf(buf, buflen, "cudart device memory info lookup failure %d", ret); + resp->err = strdup(buf); + return; + } + + resp->total = memInfo.total; + resp->free = memInfo.free; + resp->used = memInfo.used; + + LOG(h.verbose, "[%s] CUDA totalMem %" PRId64 "\n", resp->gpu_id, resp->total); + LOG(h.verbose, "[%s] CUDA freeMem %" PRId64 "\n", resp->gpu_id, resp->free); + LOG(h.verbose, "[%s] CUDA usedMem %" PRId64 "\n", resp->gpu_id, resp->used); + LOG(h.verbose, "[%s] Compute Capability %d.%d\n", resp->gpu_id, resp->major, resp->minor); +} + +void cudart_release(cudart_handle_t h) { + LOG(h.verbose, "releasing cudart library\n"); + UNLOAD_LIBRARY(h.handle); + h.handle = NULL; +} + +#endif // __APPLE__ diff --git a/discover/gpu_info_cudart.h b/discover/gpu_info_cudart.h new file mode 100644 
index 0000000..ff0c0af --- /dev/null +++ b/discover/gpu_info_cudart.h @@ -0,0 +1,148 @@ +#ifndef __APPLE__ +#ifndef __GPU_INFO_CUDART_H__ +#define __GPU_INFO_CUDART_H__ +#include "gpu_info.h" + +// Just enough typedef's to dlopen/dlsym for memory information +typedef enum cudartReturn_enum { + CUDART_SUCCESS = 0, + CUDART_ERROR_INVALID_VALUE = 1, + CUDART_ERROR_MEMORY_ALLOCATION = 2, + CUDART_ERROR_INSUFFICIENT_DRIVER = 35, + // Other values omitted for now... +} cudartReturn_t; + +typedef enum cudartDeviceAttr_enum { + cudartDevAttrComputeCapabilityMajor = 75, + cudartDevAttrComputeCapabilityMinor = 76, + + // TODO - not yet wired up but may be useful for Jetson or other + // integrated GPU scenarios with shared memory + cudaDevAttrIntegrated = 18 + +} cudartDeviceAttr_t; + +typedef void *cudartDevice_t; // Opaque is sufficient +typedef struct cudartMemory_st { + size_t total; + size_t free; + size_t used; +} cudartMemory_t; + +typedef struct cudartDriverVersion { + int major; + int minor; +} cudartDriverVersion_t; + +typedef struct cudaUUID { + unsigned char bytes[16]; +} cudaUUID_t; +typedef struct cudaDeviceProp { + char name[256]; /**< ASCII string identifying device */ + cudaUUID_t uuid; /**< 16-byte unique identifier */ + char luid[8]; /**< 8-byte locally unique identifier. Value is undefined on TCC and non-Windows platforms */ + unsigned int luidDeviceNodeMask; /**< LUID device node mask. Value is undefined on TCC and non-Windows platforms */ + size_t totalGlobalMem; /**< Global memory available on device in bytes */ + size_t sharedMemPerBlock; /**< Shared memory available per block in bytes */ + int regsPerBlock; /**< 32-bit registers available per block */ + int warpSize; /**< Warp size in threads */ + size_t memPitch; /**< Maximum pitch in bytes allowed by memory copies */ + int maxThreadsPerBlock; /**< Maximum number of threads per block */ + int maxThreadsDim[3]; /**< Maximum size of each dimension of a block */ + int maxGridSize[3]; /**< Maximum size of each dimension of a grid */ + int clockRate; /**< Clock frequency in kilohertz */ + size_t totalConstMem; /**< Constant memory available on device in bytes */ + int major; /**< Major compute capability */ + int minor; /**< Minor compute capability */ + size_t textureAlignment; /**< Alignment requirement for textures */ + size_t texturePitchAlignment; /**< Pitch alignment requirement for texture references bound to pitched memory */ + int deviceOverlap; /**< Device can concurrently copy memory and execute a kernel. Deprecated. Use instead asyncEngineCount. */ + int multiProcessorCount; /**< Number of multiprocessors on device */ + int kernelExecTimeoutEnabled; /**< Specified whether there is a run time limit on kernels */ + int integrated; /**< Device is integrated as opposed to discrete */ + int canMapHostMemory; /**< Device can map host memory with cudaHostAlloc/cudaHostGetDevicePointer */ + int computeMode; /**< Compute mode (See ::cudaComputeMode) */ + int maxTexture1D; /**< Maximum 1D texture size */ + int maxTexture1DMipmap; /**< Maximum 1D mipmapped texture size */ + int maxTexture1DLinear; /**< Deprecated, do not use. Use cudaDeviceGetTexture1DLinearMaxWidth() or cuDeviceGetTexture1DLinearMaxWidth() instead. 
*/ + int maxTexture2D[2]; /**< Maximum 2D texture dimensions */ + int maxTexture2DMipmap[2]; /**< Maximum 2D mipmapped texture dimensions */ + int maxTexture2DLinear[3]; /**< Maximum dimensions (width, height, pitch) for 2D textures bound to pitched memory */ + int maxTexture2DGather[2]; /**< Maximum 2D texture dimensions if texture gather operations have to be performed */ + int maxTexture3D[3]; /**< Maximum 3D texture dimensions */ + int maxTexture3DAlt[3]; /**< Maximum alternate 3D texture dimensions */ + int maxTextureCubemap; /**< Maximum Cubemap texture dimensions */ + int maxTexture1DLayered[2]; /**< Maximum 1D layered texture dimensions */ + int maxTexture2DLayered[3]; /**< Maximum 2D layered texture dimensions */ + int maxTextureCubemapLayered[2];/**< Maximum Cubemap layered texture dimensions */ + int maxSurface1D; /**< Maximum 1D surface size */ + int maxSurface2D[2]; /**< Maximum 2D surface dimensions */ + int maxSurface3D[3]; /**< Maximum 3D surface dimensions */ + int maxSurface1DLayered[2]; /**< Maximum 1D layered surface dimensions */ + int maxSurface2DLayered[3]; /**< Maximum 2D layered surface dimensions */ + int maxSurfaceCubemap; /**< Maximum Cubemap surface dimensions */ + int maxSurfaceCubemapLayered[2];/**< Maximum Cubemap layered surface dimensions */ + size_t surfaceAlignment; /**< Alignment requirements for surfaces */ + int concurrentKernels; /**< Device can possibly execute multiple kernels concurrently */ + int ECCEnabled; /**< Device has ECC support enabled */ + int pciBusID; /**< PCI bus ID of the device */ + int pciDeviceID; /**< PCI device ID of the device */ + int pciDomainID; /**< PCI domain ID of the device */ + int tccDriver; /**< 1 if device is a Tesla device using TCC driver, 0 otherwise */ + int asyncEngineCount; /**< Number of asynchronous engines */ + int unifiedAddressing; /**< Device shares a unified address space with the host */ + int memoryClockRate; /**< Peak memory clock frequency in kilohertz */ + int memoryBusWidth; /**< Global memory bus width in bits */ + int l2CacheSize; /**< Size of L2 cache in bytes */ + int persistingL2CacheMaxSize; /**< Device's maximum l2 persisting lines capacity setting in bytes */ + int maxThreadsPerMultiProcessor;/**< Maximum resident threads per multiprocessor */ + int streamPrioritiesSupported; /**< Device supports stream priorities */ + int globalL1CacheSupported; /**< Device supports caching globals in L1 */ + int localL1CacheSupported; /**< Device supports caching locals in L1 */ + size_t sharedMemPerMultiprocessor; /**< Shared memory available per multiprocessor in bytes */ + int regsPerMultiprocessor; /**< 32-bit registers available per multiprocessor */ + int managedMemory; /**< Device supports allocating managed memory on this system */ + int isMultiGpuBoard; /**< Device is on a multi-GPU board */ + int multiGpuBoardGroupID; /**< Unique identifier for a group of devices on the same multi-GPU board */ + int hostNativeAtomicSupported; /**< Link between the device and the host supports native atomic operations */ + int singleToDoublePrecisionPerfRatio; /**< Ratio of single precision performance (in floating-point operations per second) to double precision performance */ + int pageableMemoryAccess; /**< Device supports coherently accessing pageable memory without calling cudaHostRegister on it */ + int concurrentManagedAccess; /**< Device can coherently access managed memory concurrently with the CPU */ + int computePreemptionSupported; /**< Device supports Compute Preemption */ + int 
canUseHostPointerForRegisteredMem; /**< Device can access host registered memory at the same virtual address as the CPU */ + int cooperativeLaunch; /**< Device supports launching cooperative kernels via ::cudaLaunchCooperativeKernel */ + int cooperativeMultiDeviceLaunch; /**< Deprecated, cudaLaunchCooperativeKernelMultiDevice is deprecated. */ + size_t sharedMemPerBlockOptin; /**< Per device maximum shared memory per block usable by special opt in */ + int pageableMemoryAccessUsesHostPageTables; /**< Device accesses pageable memory via the host's page tables */ + int directManagedMemAccessFromHost; /**< Host can directly access managed memory on the device without migration. */ + int maxBlocksPerMultiProcessor; /**< Maximum number of resident blocks per multiprocessor */ + int accessPolicyMaxWindowSize; /**< The maximum value of ::cudaAccessPolicyWindow::num_bytes. */ + size_t reservedSharedMemPerBlock; /**< Shared memory reserved by CUDA driver per block in bytes */ + } cudaDeviceProp_t; + +typedef struct cudart_handle { + void *handle; + uint16_t verbose; + cudartReturn_t (*cudaSetDevice)(int device); + cudartReturn_t (*cudaDeviceSynchronize)(void); + cudartReturn_t (*cudaDeviceReset)(void); + cudartReturn_t (*cudaMemGetInfo)(size_t *, size_t *); + cudartReturn_t (*cudaGetDeviceCount)(int *); + cudartReturn_t (*cudaDeviceGetAttribute)(int* value, cudartDeviceAttr_t attr, int device); + cudartReturn_t (*cudaDriverGetVersion) (int *driverVersion); + cudartReturn_t (*cudaGetDeviceProperties) (cudaDeviceProp_t* prop, int device); +} cudart_handle_t; + +typedef struct cudart_init_resp { + char *err; // If err is non-null handle is invalid + cudart_handle_t ch; + int num_devices; +} cudart_init_resp_t; + +void cudart_init(char *cudart_lib_path, cudart_init_resp_t *resp); +void cudart_bootstrap(cudart_handle_t ch, int device_id, mem_info_t *resp); +// TODO - if we keep this library longer term, add cudart_get_free +void cudart_release(cudart_handle_t ch); + +#endif // __GPU_INFO_CUDART_H__ +#endif // __APPLE__ diff --git a/discover/gpu_info_darwin.h b/discover/gpu_info_darwin.h new file mode 100644 index 0000000..415e792 --- /dev/null +++ b/discover/gpu_info_darwin.h @@ -0,0 +1,5 @@ +#import +#include +uint64_t getRecommendedMaxVRAM(); +uint64_t getPhysicalMemory(); +uint64_t getFreeMemory(); diff --git a/discover/gpu_info_darwin.m b/discover/gpu_info_darwin.m new file mode 100644 index 0000000..5ca139e --- /dev/null +++ b/discover/gpu_info_darwin.m @@ -0,0 +1,35 @@ +#import +#import +#include "gpu_info_darwin.h" + +uint64_t getRecommendedMaxVRAM() { + id device = MTLCreateSystemDefaultDevice(); + uint64_t result = device.recommendedMaxWorkingSetSize; + CFRelease(device); + return result; +} + +// getPhysicalMemory returns the total physical memory in bytes +uint64_t getPhysicalMemory() { + return [NSProcessInfo processInfo].physicalMemory; +} + +// getFreeMemory returns the total free memory in bytes, including inactive +// memory that can be reclaimed by the system. 
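+// The estimate below sums the free, speculative, and inactive page counts reported by host_statistics64.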
+uint64_t getFreeMemory() { + mach_port_t host_port = mach_host_self(); + mach_msg_type_number_t host_size = sizeof(vm_statistics64_data_t) / sizeof(integer_t); + vm_size_t pagesize; + vm_statistics64_data_t vm_stat; + + host_page_size(host_port, &pagesize); + if (host_statistics64(host_port, HOST_VM_INFO64, (host_info64_t)&vm_stat, &host_size) != KERN_SUCCESS) { + return 0; + } + + uint64_t free_memory = (uint64_t)vm_stat.free_count * pagesize; + free_memory += (uint64_t)vm_stat.speculative_count * pagesize; + free_memory += (uint64_t)vm_stat.inactive_count * pagesize; + + return free_memory; +} diff --git a/discover/gpu_info_nvcuda.c b/discover/gpu_info_nvcuda.c new file mode 100644 index 0000000..d2d0b68 --- /dev/null +++ b/discover/gpu_info_nvcuda.c @@ -0,0 +1,251 @@ +#ifndef __APPLE__ // TODO - maybe consider nvidia support on intel macs? + +#include +#include +#include "gpu_info_nvcuda.h" + +void nvcuda_init(char *nvcuda_lib_path, nvcuda_init_resp_t *resp) { + LOG(resp->ch.verbose, "initializing %s\n", nvcuda_lib_path); + CUresult ret; + resp->err = NULL; + resp->num_devices = 0; + resp->cudaErr = CUDA_SUCCESS; + const int buflen = 256; + char buf[buflen + 1]; + int i; + + struct lookup { + char *s; + void **p; + } l[] = { + + {"cuInit", (void *)&resp->ch.cuInit}, + {"cuDriverGetVersion", (void *)&resp->ch.cuDriverGetVersion}, + {"cuDeviceGetCount", (void *)&resp->ch.cuDeviceGetCount}, + {"cuDeviceGet", (void *)&resp->ch.cuDeviceGet}, + {"cuDeviceGetAttribute", (void *)&resp->ch.cuDeviceGetAttribute}, + {"cuDeviceGetUuid", (void *)&resp->ch.cuDeviceGetUuid}, + {"cuDeviceGetName", (void *)&resp->ch.cuDeviceGetName}, + {"cuCtxCreate_v3", (void *)&resp->ch.cuCtxCreate_v3}, + {"cuMemGetInfo_v2", (void *)&resp->ch.cuMemGetInfo_v2}, + {"cuCtxDestroy", (void *)&resp->ch.cuCtxDestroy}, + {NULL, NULL}, + }; + + resp->ch.handle = LOAD_LIBRARY(nvcuda_lib_path, RTLD_LAZY); + if (!resp->ch.handle) { + char *msg = LOAD_ERR(); + LOG(resp->ch.verbose, "library %s load err: %s\n", nvcuda_lib_path, msg); + snprintf(buf, buflen, + "Unable to load %s library to query for Nvidia GPUs: %s", + nvcuda_lib_path, msg); + free(msg); + resp->err = strdup(buf); + resp->cudaErr = -1; + return; + } + + for (i = 0; l[i].s != NULL; i++) { + *l[i].p = LOAD_SYMBOL(resp->ch.handle, l[i].s); + if (!*(l[i].p)) { + char *msg = LOAD_ERR(); + LOG(resp->ch.verbose, "dlerr: %s\n", msg); + UNLOAD_LIBRARY(resp->ch.handle); + resp->ch.handle = NULL; + snprintf(buf, buflen, "symbol lookup for %s failed: %s", l[i].s, + msg); + free(msg); + resp->err = strdup(buf); + resp->cudaErr = -1; + return; + } + LOG(resp->ch.verbose, "dlsym: %s - %p\n", l[i].s, *l[i].p); + } + + LOG(resp->ch.verbose, "calling cuInit\n"); + ret = (*resp->ch.cuInit)(0); + if (ret != CUDA_SUCCESS) { + LOG(resp->ch.verbose, "cuInit err: %d\n", ret); + UNLOAD_LIBRARY(resp->ch.handle); + resp->ch.handle = NULL; + snprintf(buf, buflen, "cuda driver library init failure: %d", ret); + resp->err = strdup(buf); + resp->cudaErr = ret; + return; + } + + int version = 0; + resp->ch.driver_major = 0; + resp->ch.driver_minor = 0; + + // Report driver version if we're in verbose mode, ignore errors + LOG(resp->ch.verbose, "calling cuDriverGetVersion\n"); + ret = (*resp->ch.cuDriverGetVersion)(&version); + if (ret != CUDA_SUCCESS) { + LOG(resp->ch.verbose, "cuDriverGetVersion failed: %d\n", ret); + } else { + LOG(resp->ch.verbose, "raw version 0x%x\n", version); + resp->ch.driver_major = version / 1000; + resp->ch.driver_minor = (version - (resp->ch.driver_major * 1000)) / 
10; + LOG(resp->ch.verbose, "CUDA driver version: %d.%d\n", resp->ch.driver_major, resp->ch.driver_minor); + } + + LOG(resp->ch.verbose, "calling cuDeviceGetCount\n"); + ret = (*resp->ch.cuDeviceGetCount)(&resp->num_devices); + if (ret != CUDA_SUCCESS) { + LOG(resp->ch.verbose, "cuDeviceGetCount err: %d\n", ret); + UNLOAD_LIBRARY(resp->ch.handle); + resp->ch.handle = NULL; + snprintf(buf, buflen, "unable to get device count: %d", ret); + resp->err = strdup(buf); + resp->cudaErr = ret; + return; + } + LOG(resp->ch.verbose, "device count %d\n", resp->num_devices); +} + +const int buflen = 256; +void nvcuda_bootstrap(nvcuda_handle_t h, int i, mem_info_t *resp) { + resp->err = NULL; + nvcudaMemory_t memInfo = {0,0}; + CUresult ret; + CUdevice device = -1; + CUcontext ctx = NULL; + char buf[buflen + 1]; + CUuuid uuid = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; + + if (h.handle == NULL) { + resp->err = strdup("cuda driver library handle isn't initialized"); + return; + } + + ret = (*h.cuDeviceGet)(&device, i); + if (ret != CUDA_SUCCESS) { + snprintf(buf, buflen, "cuda driver library device failed to initialize"); + resp->err = strdup(buf); + return; + } + + int major = 0; + int minor = 0; + ret = (*h.cuDeviceGetAttribute)(&major, CU_DEVICE_ATTRIBUTE_COMPUTE_CAPABILITY_MAJOR, device); + if (ret != CUDA_SUCCESS) { + LOG(h.verbose, "[%d] device major lookup failure: %d\n", i, ret); + } else { + ret = (*h.cuDeviceGetAttribute)(&minor, CU_DEVICE_ATTRIBUTE_COMPUTE_CAPABILITY_MINOR, device); + if (ret != CUDA_SUCCESS) { + LOG(h.verbose, "[%d] device minor lookup failure: %d\n", i, ret); + } else { + resp->minor = minor; + resp->major = major; + } + } + + ret = (*h.cuDeviceGetUuid)(&uuid, device); + if (ret != CUDA_SUCCESS) { + LOG(h.verbose, "[%d] device uuid lookup failure: %d\n", i, ret); + snprintf(&resp->gpu_id[0], GPU_ID_LEN, "%d", i); + } else { + // GPU-d110a105-ac29-1d54-7b49-9c90440f215b + snprintf(&resp->gpu_id[0], GPU_ID_LEN, + "GPU-%02x%02x%02x%02x-%02x%02x-%02x%02x-%02x%02x-%02x%02x%02x%02x%02x%02x", + uuid.bytes[0], + uuid.bytes[1], + uuid.bytes[2], + uuid.bytes[3], + uuid.bytes[4], + uuid.bytes[5], + uuid.bytes[6], + uuid.bytes[7], + uuid.bytes[8], + uuid.bytes[9], + uuid.bytes[10], + uuid.bytes[11], + uuid.bytes[12], + uuid.bytes[13], + uuid.bytes[14], + uuid.bytes[15] + ); + } + + ret = (*h.cuDeviceGetName)(&resp->gpu_name[0], GPU_NAME_LEN, device); + if (ret != CUDA_SUCCESS) { + LOG(h.verbose, "[%d] device name lookup failure: %d\n", i, ret); + resp->gpu_name[0] = '\0'; + } + + // To get memory we have to set (and release) a context + ret = (*h.cuCtxCreate_v3)(&ctx, NULL, 0, 0, device); + if (ret != CUDA_SUCCESS) { + snprintf(buf, buflen, "cuda driver library failed to get device context %d", ret); + resp->err = strdup(buf); + return; + } + + ret = (*h.cuMemGetInfo_v2)(&memInfo.free, &memInfo.total); + if (ret != CUDA_SUCCESS) { + snprintf(buf, buflen, "cuda driver library device memory info lookup failure %d", ret); + resp->err = strdup(buf); + // Best effort on failure... 
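+    // the context created above is still destroyed so the error path does not leak it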
+ (*h.cuCtxDestroy)(ctx); + return; + } + + resp->total = memInfo.total; + resp->free = memInfo.free; + + LOG(h.verbose, "[%s] CUDA totalMem %" PRId64 "mb\n", resp->gpu_id, resp->total / 1024 / 1024); + LOG(h.verbose, "[%s] CUDA freeMem %" PRId64 "mb\n", resp->gpu_id, resp->free / 1024 / 1024); + LOG(h.verbose, "[%s] Compute Capability %d.%d\n", resp->gpu_id, resp->major, resp->minor); + + + + ret = (*h.cuCtxDestroy)(ctx); + if (ret != CUDA_SUCCESS) { + LOG(1, "cuda driver library failed to release device context %d", ret); + } +} + +void nvcuda_get_free(nvcuda_handle_t h, int i, uint64_t *free, uint64_t *total) { + CUresult ret; + CUcontext ctx = NULL; + CUdevice device = -1; + *free = 0; + *total = 0; + + ret = (*h.cuDeviceGet)(&device, i); + if (ret != CUDA_SUCCESS) { + LOG(1, "cuda driver library device failed to initialize"); + return; + } + + + // To get memory we have to set (and release) a context + ret = (*h.cuCtxCreate_v3)(&ctx, NULL, 0, 0, device); + if (ret != CUDA_SUCCESS) { + LOG(1, "cuda driver library failed to get device context %d", ret); + return; + } + + ret = (*h.cuMemGetInfo_v2)(free, total); + if (ret != CUDA_SUCCESS) { + LOG(1, "cuda driver library device memory info lookup failure %d", ret); + // Best effort on failure... + (*h.cuCtxDestroy)(ctx); + return; + } + + ret = (*h.cuCtxDestroy)(ctx); + if (ret != CUDA_SUCCESS) { + LOG(1, "cuda driver library failed to release device context %d", ret); + } +} + +void nvcuda_release(nvcuda_handle_t h) { + LOG(h.verbose, "releasing cuda driver library\n"); + UNLOAD_LIBRARY(h.handle); + // TODO and other context release logic? + h.handle = NULL; +} + +#endif // __APPLE__ diff --git a/discover/gpu_info_nvcuda.h b/discover/gpu_info_nvcuda.h new file mode 100644 index 0000000..ef2fe8a --- /dev/null +++ b/discover/gpu_info_nvcuda.h @@ -0,0 +1,79 @@ +#ifndef __APPLE__ +#ifndef __GPU_INFO_NVCUDA_H__ +#define __GPU_INFO_NVCUDA_H__ +#include "gpu_info.h" + +// Just enough typedef's to dlopen/dlsym for memory information +typedef enum cudaError_enum { + CUDA_SUCCESS = 0, + CUDA_ERROR_INVALID_VALUE = 1, + CUDA_ERROR_OUT_OF_MEMORY = 2, + CUDA_ERROR_NOT_INITIALIZED = 3, + CUDA_ERROR_INSUFFICIENT_DRIVER = 35, + CUDA_ERROR_NO_DEVICE = 100, + CUDA_ERROR_SYSTEM_DRIVER_MISMATCH = 803, + CUDA_ERROR_UNKNOWN = 999, + // Other values omitted for now... 
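+  // only the codes the Go discovery code inspects when classifying driver init failures are declared here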
+} CUresult; + +typedef enum CUdevice_attribute_enum { + CU_DEVICE_ATTRIBUTE_COMPUTE_CAPABILITY_MAJOR = 75, + CU_DEVICE_ATTRIBUTE_COMPUTE_CAPABILITY_MINOR = 76, + + // TODO - not yet wired up but may be useful for Jetson or other + // integrated GPU scenarios with shared memory + CU_DEVICE_ATTRIBUTE_INTEGRATED = 18 + +} CUdevice_attribute; + +typedef void *nvcudaDevice_t; // Opaque is sufficient +typedef struct nvcudaMemory_st { + uint64_t total; + uint64_t free; +} nvcudaMemory_t; + +typedef struct nvcudaDriverVersion { + int major; + int minor; +} nvcudaDriverVersion_t; + +typedef struct CUuuid_st { + unsigned char bytes[16]; +} CUuuid; + +typedef int CUdevice; +typedef void* CUcontext; + +typedef struct nvcuda_handle { + void *handle; + uint16_t verbose; + int driver_major; + int driver_minor; + CUresult (*cuInit)(unsigned int Flags); + CUresult (*cuDriverGetVersion)(int *driverVersion); + CUresult (*cuDeviceGetCount)(int *); + CUresult (*cuDeviceGet)(CUdevice* device, int ordinal); + CUresult (*cuDeviceGetAttribute)(int* pi, CUdevice_attribute attrib, CUdevice dev); + CUresult (*cuDeviceGetUuid)(CUuuid* uuid, CUdevice dev); // signature compatible with cuDeviceGetUuid_v2 + CUresult (*cuDeviceGetName)(char *name, int len, CUdevice dev); + + // Context specific aspects + CUresult (*cuCtxCreate_v3)(CUcontext* pctx, void *params, int len, unsigned int flags, CUdevice dev); + CUresult (*cuMemGetInfo_v2)(uint64_t* free, uint64_t* total); + CUresult (*cuCtxDestroy)(CUcontext ctx); +} nvcuda_handle_t; + +typedef struct nvcuda_init_resp { + char *err; // If err is non-null handle is invalid + nvcuda_handle_t ch; + int num_devices; + CUresult cudaErr; +} nvcuda_init_resp_t; + +void nvcuda_init(char *nvcuda_lib_path, nvcuda_init_resp_t *resp); +void nvcuda_bootstrap(nvcuda_handle_t ch, int device_id, mem_info_t *resp); +void nvcuda_get_free(nvcuda_handle_t ch, int device_id, uint64_t *free, uint64_t *total); +void nvcuda_release(nvcuda_handle_t ch); + +#endif // __GPU_INFO_NVCUDA_H__ +#endif // __APPLE__ diff --git a/discover/gpu_info_nvml.c b/discover/gpu_info_nvml.c new file mode 100644 index 0000000..342a3aa --- /dev/null +++ b/discover/gpu_info_nvml.c @@ -0,0 +1,104 @@ +#ifndef __APPLE__ // TODO - maybe consider nvidia support on intel macs? 
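+// NVML-backed VRAM queries: dlopen the management library, resolve the handful of symbols below, and report per-UUID memory usage.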
+ +#include + +#include "gpu_info_nvml.h" + +void nvml_init(char *nvml_lib_path, nvml_init_resp_t *resp) { + nvmlReturn_t ret; + resp->err = NULL; + const int buflen = 256; + char buf[buflen + 1]; + int i; + + struct lookup { + char *s; + void **p; + } l[] = { + {"nvmlInit_v2", (void *)&resp->ch.nvmlInit_v2}, + {"nvmlShutdown", (void *)&resp->ch.nvmlShutdown}, + {"nvmlDeviceGetHandleByUUID", (void *)&resp->ch.nvmlDeviceGetHandleByUUID}, + {"nvmlDeviceGetMemoryInfo", (void *)&resp->ch.nvmlDeviceGetMemoryInfo}, + {NULL, NULL}, + }; + + resp->ch.handle = LOAD_LIBRARY(nvml_lib_path, RTLD_LAZY); + if (!resp->ch.handle) { + char *msg = LOAD_ERR(); + LOG(resp->ch.verbose, "library %s load err: %s\n", nvml_lib_path, msg); + snprintf(buf, buflen, + "Unable to load %s library to query for Nvidia GPUs: %s", + nvml_lib_path, msg); + free(msg); + resp->err = strdup(buf); + return; + } + + // TODO once we've squashed the remaining corner cases remove this log + // LOG(resp->ch.verbose, "wiring nvidia management library functions in %s\n", nvml_lib_path); + + for (i = 0; l[i].s != NULL; i++) { + // TODO once we've squashed the remaining corner cases remove this log + // LOG(resp->ch.verbose, "dlsym: %s\n", l[i].s); + + *l[i].p = LOAD_SYMBOL(resp->ch.handle, l[i].s); + if (!*(l[i].p)) { + resp->ch.handle = NULL; + char *msg = LOAD_ERR(); + LOG(resp->ch.verbose, "dlerr: %s\n", msg); + UNLOAD_LIBRARY(resp->ch.handle); + snprintf(buf, buflen, "symbol lookup for %s failed: %s", l[i].s, + msg); + free(msg); + resp->err = strdup(buf); + return; + } + } + + ret = (*resp->ch.nvmlInit_v2)(); + if (ret != NVML_SUCCESS) { + LOG(resp->ch.verbose, "nvmlInit_v2 err: %d\n", ret); + UNLOAD_LIBRARY(resp->ch.handle); + resp->ch.handle = NULL; + snprintf(buf, buflen, "nvml vram init failure: %d", ret); + resp->err = strdup(buf); + return; + } +} + + +void nvml_get_free(nvml_handle_t h, char *uuid, uint64_t *free, uint64_t *total, uint64_t *used) { + nvmlDevice_t device; + nvmlMemory_t memInfo = {0}; + nvmlReturn_t ret; + ret = (*h.nvmlDeviceGetHandleByUUID)((const char *)(uuid), &device); + if (ret != NVML_SUCCESS) { + LOG(1, "unable to get device handle %s: %d", uuid, ret); + *free = 0; + return; + } + + ret = (*h.nvmlDeviceGetMemoryInfo)(device, &memInfo); + if (ret != NVML_SUCCESS) { + LOG(1, "device memory info lookup failure %s: %d", uuid, ret); + *free = 0; + return; + } + *free = memInfo.free; + *total = memInfo.total; + *used = memInfo.used; +} + + +void nvml_release(nvml_handle_t h) { + LOG(h.verbose, "releasing nvml library\n"); + nvmlReturn_t ret; + ret = (*h.nvmlShutdown)(); + if (ret != NVML_SUCCESS) { + LOG(1, "error during nvmlShutdown %d", ret); + } + UNLOAD_LIBRARY(h.handle); + h.handle = NULL; +} + +#endif // __APPLE__ \ No newline at end of file diff --git a/discover/gpu_info_nvml.h b/discover/gpu_info_nvml.h new file mode 100644 index 0000000..9088023 --- /dev/null +++ b/discover/gpu_info_nvml.h @@ -0,0 +1,48 @@ +#ifndef __APPLE__ +#ifndef __GPU_INFO_NVML_H__ +#define __GPU_INFO_NVML_H__ +#include "gpu_info.h" + +// Just enough typedef's to dlopen/dlsym for memory information +typedef enum nvmlReturn_enum { + NVML_SUCCESS = 0, + // Other values omitted for now... 
+} nvmlReturn_t; +typedef void *nvmlDevice_t; // Opaque is sufficient +typedef struct nvmlMemory_st { + unsigned long long total; + unsigned long long free; + unsigned long long used; +} nvmlMemory_t; + +typedef enum nvmlBrandType_enum +{ + NVML_BRAND_UNKNOWN = 0, +} nvmlBrandType_t; + +typedef struct nvml_handle { + void *handle; + uint16_t verbose; + nvmlReturn_t (*nvmlInit_v2)(void); + nvmlReturn_t (*nvmlShutdown)(void); + nvmlReturn_t (*nvmlDeviceGetHandleByUUID)(const char *, nvmlDevice_t *); + nvmlReturn_t (*nvmlDeviceGetMemoryInfo)(nvmlDevice_t, nvmlMemory_t *); +} nvml_handle_t; + +typedef struct nvml_init_resp { + char *err; // If err is non-null handle is invalid + nvml_handle_t ch; +} nvml_init_resp_t; + +typedef struct nvml_compute_capability { + char *err; + int major; + int minor; +} nvml_compute_capability_t; + +void nvml_init(char *nvml_lib_path, nvml_init_resp_t *resp); +void nvml_get_free(nvml_handle_t ch, char *uuid, uint64_t *free, uint64_t *total, uint64_t *used); +void nvml_release(nvml_handle_t ch); + +#endif // __GPU_INFO_NVML_H__ +#endif // __APPLE__ \ No newline at end of file diff --git a/discover/gpu_info_oneapi.c b/discover/gpu_info_oneapi.c new file mode 100644 index 0000000..3ff708e --- /dev/null +++ b/discover/gpu_info_oneapi.c @@ -0,0 +1,259 @@ +#ifndef __APPLE__ + +#include "gpu_info_oneapi.h" + +#include + +void oneapi_init(char *oneapi_lib_path, oneapi_init_resp_t *resp) { + ze_result_t ret; + resp->err = NULL; + resp->oh.devices = NULL; + resp->oh.num_devices = NULL; + resp->oh.drivers = NULL; + resp->oh.num_drivers = 0; + const int buflen = 256; + char buf[buflen + 1]; + int i, d; + struct lookup { + char *s; + void **p; + } l[] = { + {"zesInit", (void *)&resp->oh.zesInit}, + {"zesDriverGet", (void *)&resp->oh.zesDriverGet}, + {"zesDeviceGet", (void *)&resp->oh.zesDeviceGet}, + {"zesDeviceGetProperties", (void *)&resp->oh.zesDeviceGetProperties}, + {"zesDeviceEnumMemoryModules", + (void *)&resp->oh.zesDeviceEnumMemoryModules}, + {"zesMemoryGetProperties", (void *)&resp->oh.zesMemoryGetProperties}, + {"zesMemoryGetState", (void *)&resp->oh.zesMemoryGetState}, + {NULL, NULL}, + }; + + resp->oh.handle = LOAD_LIBRARY(oneapi_lib_path, RTLD_LAZY); + if (!resp->oh.handle) { + char *msg = LOAD_ERR(); + snprintf(buf, buflen, + "Unable to load %s library to query for Intel GPUs: %s\n", + oneapi_lib_path, msg); + free(msg); + resp->err = strdup(buf); + return; + } + + // TODO once we've squashed the remaining corner cases remove this log + LOG(resp->oh.verbose, + "wiring Level-Zero management library functions in %s\n", + oneapi_lib_path); + + for (i = 0; l[i].s != NULL; i++) { + // TODO once we've squashed the remaining corner cases remove this log + LOG(resp->oh.verbose, "dlsym: %s\n", l[i].s); + + *l[i].p = LOAD_SYMBOL(resp->oh.handle, l[i].s); + if (!*(l[i].p)) { + resp->oh.handle = NULL; + char *msg = LOAD_ERR(); + LOG(resp->oh.verbose, "dlerr: %s\n", msg); + UNLOAD_LIBRARY(resp->oh.handle); + snprintf(buf, buflen, "symbol lookup for %s failed: %s", l[i].s, msg); + free(msg); + resp->err = strdup(buf); + return; + } + } + + LOG(resp->oh.verbose, "calling zesInit\n"); + + ret = (*resp->oh.zesInit)(0); + if (ret != ZE_RESULT_SUCCESS) { + LOG(resp->oh.verbose, "zesInit err: %x\n", ret); + snprintf(buf, buflen, "oneapi vram init failure: %x", ret); + resp->err = strdup(buf); + oneapi_release(resp->oh); + return; + } + + LOG(resp->oh.verbose, "calling zesDriverGet\n"); + ret = (*resp->oh.zesDriverGet)(&resp->oh.num_drivers, NULL); + if (ret != ZE_RESULT_SUCCESS) 
{ + LOG(resp->oh.verbose, "zesDriverGet err: %x\n", ret); + snprintf(buf, buflen, "unable to get driver count: %x", ret); + resp->err = strdup(buf); + oneapi_release(resp->oh); + return; + } + LOG(resp->oh.verbose, "oneapi driver count: %d\n", resp->oh.num_drivers); + resp->oh.drivers = malloc(resp->oh.num_drivers * sizeof(zes_driver_handle_t)); + resp->oh.num_devices = malloc(resp->oh.num_drivers * sizeof(uint32_t)); + memset(&resp->oh.num_devices[0], 0, resp->oh.num_drivers * sizeof(uint32_t)); + resp->oh.devices = + malloc(resp->oh.num_drivers * sizeof(zes_device_handle_t *)); + ret = (*resp->oh.zesDriverGet)(&resp->oh.num_drivers, &resp->oh.drivers[0]); + if (ret != ZE_RESULT_SUCCESS) { + LOG(resp->oh.verbose, "zesDriverGet err: %x\n", ret); + snprintf(buf, buflen, "unable to get driver count: %x", ret); + resp->err = strdup(buf); + oneapi_release(resp->oh); + return; + } + + for (d = 0; d < resp->oh.num_drivers; d++) { + LOG(resp->oh.verbose, "calling zesDeviceGet count %d: %p\n", d, resp->oh.drivers[d]); + ret = (*resp->oh.zesDeviceGet)(resp->oh.drivers[d], + &resp->oh.num_devices[d], NULL); + if (ret != ZE_RESULT_SUCCESS) { + LOG(resp->oh.verbose, "zesDeviceGet err: %x\n", ret); + snprintf(buf, buflen, "unable to get device count: %x", ret); + resp->err = strdup(buf); + oneapi_release(resp->oh); + return; + } + resp->oh.devices[d] = + malloc(resp->oh.num_devices[d] * sizeof(zes_device_handle_t)); + ret = (*resp->oh.zesDeviceGet)( + resp->oh.drivers[d], &resp->oh.num_devices[d], resp->oh.devices[d]); + if (ret != ZE_RESULT_SUCCESS) { + LOG(resp->oh.verbose, "zesDeviceGet err: %x\n", ret); + snprintf(buf, buflen, "unable to get device count: %x", ret); + resp->err = strdup(buf); + oneapi_release(resp->oh); + return; + } + } + + return; +} + +void oneapi_check_vram(oneapi_handle_t h, int driver, int device, + mem_info_t *resp) { + ze_result_t ret; + resp->err = NULL; + uint64_t totalMem = 0; + uint64_t usedMem = 0; + const int buflen = 256; + char buf[buflen + 1]; + int i, d, m; + + if (h.handle == NULL) { + resp->err = strdup("Level-Zero handle not initialized"); + return; + } + + if (driver > h.num_drivers || device > h.num_devices[driver]) { + resp->err = strdup("driver of device index out of bounds"); + return; + } + + resp->total = 0; + resp->free = 0; + + zes_device_ext_properties_t ext_props; + ext_props.stype = ZES_STRUCTURE_TYPE_DEVICE_EXT_PROPERTIES; + ext_props.pNext = NULL; + + zes_device_properties_t props; + props.stype = ZES_STRUCTURE_TYPE_DEVICE_PROPERTIES; + props.pNext = &ext_props; + + ret = (*h.zesDeviceGetProperties)(h.devices[driver][device], &props); + if (ret != ZE_RESULT_SUCCESS) { + snprintf(buf, buflen, "unable to get device properties: %d", ret); + resp->err = strdup(buf); + return; + } + + snprintf(&resp->gpu_name[0], GPU_NAME_LEN, "%s", props.modelName); + + // TODO this needs to map to ONEAPI_DEVICE_SELECTOR syntax + // (this is probably wrong...) + // TODO - the driver isn't included - what if there are multiple drivers? + snprintf(&resp->gpu_id[0], GPU_ID_LEN, "%d", device); + + if (h.verbose) { + // When in verbose mode, report more information about + // the card we discover. 
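+    // these strings come from the zes_device_properties_t filled in by zesDeviceGetProperties above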
+ LOG(h.verbose, "[%d:%d] oneAPI device name: %s\n", driver, device, + props.modelName); + LOG(h.verbose, "[%d:%d] oneAPI brand: %s\n", driver, device, + props.brandName); + LOG(h.verbose, "[%d:%d] oneAPI vendor: %s\n", driver, device, + props.vendorName); + LOG(h.verbose, "[%d:%d] oneAPI S/N: %s\n", driver, device, + props.serialNumber); + LOG(h.verbose, "[%d:%d] oneAPI board number: %s\n", driver, device, + props.boardNumber); + } + + // TODO + // Compute Capability equivalent in resp->major, resp->minor, resp->patch + + uint32_t memCount = 0; + ret = (*h.zesDeviceEnumMemoryModules)(h.devices[driver][device], &memCount, + NULL); + if (ret != ZE_RESULT_SUCCESS) { + snprintf(buf, buflen, "unable to enumerate Level-Zero memory modules: %x", + ret); + resp->err = strdup(buf); + return; + } + + LOG(h.verbose, "discovered %d Level-Zero memory modules\n", memCount); + + zes_mem_handle_t *mems = malloc(memCount * sizeof(zes_mem_handle_t)); + (*h.zesDeviceEnumMemoryModules)(h.devices[driver][device], &memCount, mems); + + for (m = 0; m < memCount; m++) { + zes_mem_state_t state; + state.stype = ZES_STRUCTURE_TYPE_MEM_STATE; + state.pNext = NULL; + ret = (*h.zesMemoryGetState)(mems[m], &state); + if (ret != ZE_RESULT_SUCCESS) { + snprintf(buf, buflen, "unable to get memory state: %x", ret); + resp->err = strdup(buf); + free(mems); + return; + } + + resp->total += state.size; + resp->free += state.free; + } + + free(mems); +} + +void oneapi_release(oneapi_handle_t h) { + int d; + LOG(h.verbose, "releasing oneapi library\n"); + for (d = 0; d < h.num_drivers; d++) { + if (h.devices != NULL && h.devices[d] != NULL) { + free(h.devices[d]); + } + } + if (h.devices != NULL) { + free(h.devices); + h.devices = NULL; + } + if (h.num_devices != NULL) { + free(h.num_devices); + h.num_devices = NULL; + } + if (h.drivers != NULL) { + free(h.drivers); + h.drivers = NULL; + } + h.num_drivers = 0; + UNLOAD_LIBRARY(h.handle); + h.handle = NULL; +} + +int oneapi_get_device_count(oneapi_handle_t h, int driver) { + if (h.handle == NULL || h.num_devices == NULL) { + return 0; + } + if (driver > h.num_drivers) { + return 0; + } + return (int)h.num_devices[driver]; +} + +#endif // __APPLE__ diff --git a/discover/gpu_info_oneapi.h b/discover/gpu_info_oneapi.h new file mode 100644 index 0000000..97fcecd --- /dev/null +++ b/discover/gpu_info_oneapi.h @@ -0,0 +1,203 @@ +#ifndef __APPLE__ +#ifndef __GPU_INFO_ONEAPI_H__ +#define __GPU_INFO_ONEAPI_H__ +#include "gpu_info.h" + +#define ZE_MAX_DEVICE_NAME 256 +#define ZE_MAX_DEVICE_UUID_SIZE 16 +#define ZES_STRING_PROPERTY_SIZE 64 +#define ZE_BIT(_i) (1 << _i) + +// Just enough typedef's to dlopen/dlsym for memory information +typedef enum ze_result_t { + ZE_RESULT_SUCCESS = 0, + // Other values omitted for now... 
+} ze_result_t; + +typedef uint8_t ze_bool_t; +typedef struct _zes_driver_handle_t *zes_driver_handle_t; +typedef struct _zes_device_handle_t *zes_device_handle_t; +typedef struct _zes_mem_handle_t *zes_mem_handle_t; + +typedef enum _ze_structure_type_t { + ZE_STRUCTURE_TYPE_FORCE_UINT32 = 0x7fffffff +} ze_structure_type_t; + +typedef enum _zes_structure_type_t { + ZES_STRUCTURE_TYPE_DEVICE_PROPERTIES = 0x1, + ZES_STRUCTURE_TYPE_MEM_PROPERTIES = 0xb, + ZES_STRUCTURE_TYPE_MEM_STATE = 0x1e, + ZES_STRUCTURE_TYPE_DEVICE_EXT_PROPERTIES = 0x2d, + ZES_STRUCTURE_TYPE_FORCE_UINT32 = 0x7fffffff +} zes_structure_type_t; + +typedef enum _zes_mem_type_t { + ZES_MEM_TYPE_FORCE_UINT32 = 0x7fffffff +} zes_mem_type_t; + +typedef enum _zes_mem_loc_t { + ZES_MEM_LOC_SYSTEM = 0, + ZES_MEM_LOC_DEVICE = 1, + ZES_MEM_LOC_FORCE_UINT32 = 0x7fffffff +} zes_mem_loc_t; + +typedef enum _zes_mem_health_t { + ZES_MEM_HEALTH_FORCE_UINT32 = 0x7fffffff +} zes_mem_health_t; + +typedef struct _ze_device_uuid_t { + uint8_t id[ZE_MAX_DEVICE_UUID_SIZE]; +} ze_device_uuid_t; + +typedef struct _zes_uuid_t { + uint8_t id[ZE_MAX_DEVICE_UUID_SIZE]; +} zes_uuid_t; + +typedef enum _ze_device_type_t { + ZE_DEVICE_TYPE_GPU = 1, + ZE_DEVICE_TYPE_CPU = 2, + ZE_DEVICE_TYPE_FPGA = 3, + ZE_DEVICE_TYPE_MCA = 4, + ZE_DEVICE_TYPE_VPU = 5, + ZE_DEVICE_TYPE_FORCE_UINT32 = 0x7fffffff +} ze_device_type_t; + +typedef enum _zes_device_type_t { + ZES_DEVICE_TYPE_GPU = 1, + ZES_DEVICE_TYPE_CPU = 2, + ZES_DEVICE_TYPE_FPGA = 3, + ZES_DEVICE_TYPE_MCA = 4, + ZES_DEVICE_TYPE_VPU = 5, + ZES_DEVICE_TYPE_FORCE_UINT32 = 0x7fffffff +} zes_device_type_t; + +typedef uint32_t ze_device_property_flags_t; +typedef enum _ze_device_property_flag_t { + ZE_DEVICE_PROPERTY_FLAG_INTEGRATED = ZE_BIT(0), + ZE_DEVICE_PROPERTY_FLAG_SUBDEVICE = ZE_BIT(1), + ZE_DEVICE_PROPERTY_FLAG_ECC = ZE_BIT(2), + ZE_DEVICE_PROPERTY_FLAG_ONDEMANDPAGING = ZE_BIT(3), + ZE_DEVICE_PROPERTY_FLAG_FORCE_UINT32 = 0x7fffffff +} ze_device_property_flag_t; + +typedef uint32_t zes_device_property_flags_t; +typedef enum _zes_device_property_flag_t { + ZES_DEVICE_PROPERTY_FLAG_INTEGRATED = ZE_BIT(0), + ZES_DEVICE_PROPERTY_FLAG_SUBDEVICE = ZE_BIT(1), + ZES_DEVICE_PROPERTY_FLAG_ECC = ZE_BIT(2), + ZES_DEVICE_PROPERTY_FLAG_ONDEMANDPAGING = ZE_BIT(3), + ZES_DEVICE_PROPERTY_FLAG_FORCE_UINT32 = 0x7fffffff +} zes_device_property_flag_t; + +typedef struct _ze_device_properties_t { + ze_structure_type_t stype; + void *pNext; + ze_device_type_t type; + uint32_t vendorId; + uint32_t deviceId; + ze_device_property_flags_t flags; + uint32_t subdeviceId; + uint32_t coreClockRate; + uint64_t maxMemAllocSize; + uint32_t maxHardwareContexts; + uint32_t maxCommandQueuePriority; + uint32_t numThreadsPerEU; + uint32_t physicalEUSimdWidth; + uint32_t numEUsPerSubslice; + uint32_t numSubslicesPerSlice; + uint32_t numSlices; + uint64_t timerResolution; + uint32_t timestampValidBits; + uint32_t kernelTimestampValidBits; + ze_device_uuid_t uuid; + char name[ZE_MAX_DEVICE_NAME]; +} ze_device_properties_t; + +typedef struct _zes_device_properties_t { + zes_structure_type_t stype; + void *pNext; + ze_device_properties_t core; + uint32_t numSubdevices; + char serialNumber[ZES_STRING_PROPERTY_SIZE]; + char boardNumber[ZES_STRING_PROPERTY_SIZE]; + char brandName[ZES_STRING_PROPERTY_SIZE]; + char modelName[ZES_STRING_PROPERTY_SIZE]; + char vendorName[ZES_STRING_PROPERTY_SIZE]; + char driverVersion[ZES_STRING_PROPERTY_SIZE]; +} zes_device_properties_t; + +typedef struct _zes_device_ext_properties_t { + zes_structure_type_t stype; + void 
*pNext; + zes_uuid_t uuid; + zes_device_type_t type; + zes_device_property_flags_t flags; +} zes_device_ext_properties_t; + +typedef struct _zes_mem_properties_t { + zes_structure_type_t stype; + void *pNext; + zes_mem_type_t type; + ze_bool_t onSubdevice; + uint32_t subdeviceId; + zes_mem_loc_t location; + uint64_t physicalSize; + int32_t busWidth; + int32_t numChannels; +} zes_mem_properties_t; + +typedef struct _zes_mem_state_t { + zes_structure_type_t stype; + const void *pNext; + zes_mem_health_t health; + uint64_t free; + uint64_t size; +} zes_mem_state_t; + +typedef struct oneapi_handle { + void *handle; + uint16_t verbose; + + uint32_t num_drivers; + zes_driver_handle_t *drivers; + uint32_t *num_devices; + zes_device_handle_t **devices; + + // TODO Driver major, minor information + // int driver_major; + // int driver_minor; + + ze_result_t (*zesInit)(int); + ze_result_t (*zesDriverGet)(uint32_t *pCount, zes_driver_handle_t *phDrivers); + ze_result_t (*zesDeviceGet)(zes_driver_handle_t hDriver, uint32_t *pCount, + zes_device_handle_t *phDevices); + ze_result_t (*zesDeviceGetProperties)(zes_device_handle_t hDevice, + zes_device_properties_t *pProperties); + ze_result_t (*zesDeviceEnumMemoryModules)(zes_device_handle_t hDevice, + uint32_t *pCount, + zes_mem_handle_t *phMemory); + ze_result_t (*zesMemoryGetProperties)(zes_mem_handle_t hMemory, + zes_mem_properties_t *pProperties); + ze_result_t (*zesMemoryGetState)(zes_mem_handle_t hMemory, + zes_mem_state_t *pState); + +} oneapi_handle_t; + +typedef struct oneapi_init_resp { + char *err; // If err is non-null handle is invalid + oneapi_handle_t oh; +} oneapi_init_resp_t; + +typedef struct oneapi_version_resp { + ze_result_t status; + char *str; // Contains version or error string if status != 0 +} oneapi_version_resp_t; + +void oneapi_init(char *oneapi_lib_path, oneapi_init_resp_t *resp); +void oneapi_check_vram(oneapi_handle_t h, int driver, int device, + mem_info_t *resp); +void oneapi_release(oneapi_handle_t h); +int oneapi_get_device_count(oneapi_handle_t h, int driver); + +#endif // __GPU_INFO_INTEL_H__ +#endif // __APPLE__ diff --git a/discover/gpu_linux.go b/discover/gpu_linux.go new file mode 100644 index 0000000..44c53b4 --- /dev/null +++ b/discover/gpu_linux.go @@ -0,0 +1,198 @@ +package discover + +import ( + "bufio" + "fmt" + "io" + "os" + "reflect" + "regexp" + "sort" + "strings" + + "github.com/ollama/ollama/format" +) + +var CudartGlobs = []string{ + "/usr/local/cuda/lib64/libcudart.so*", + "/usr/lib/x86_64-linux-gnu/nvidia/current/libcudart.so*", + "/usr/lib/x86_64-linux-gnu/libcudart.so*", + "/usr/lib/wsl/lib/libcudart.so*", + "/usr/lib/wsl/drivers/*/libcudart.so*", + "/opt/cuda/lib64/libcudart.so*", + "/usr/local/cuda*/targets/aarch64-linux/lib/libcudart.so*", + "/usr/lib/aarch64-linux-gnu/nvidia/current/libcudart.so*", + "/usr/lib/aarch64-linux-gnu/libcudart.so*", + "/usr/local/cuda/lib*/libcudart.so*", + "/usr/lib*/libcudart.so*", + "/usr/local/lib*/libcudart.so*", +} + +var NvmlGlobs = []string{} + +var NvcudaGlobs = []string{ + "/usr/local/cuda*/targets/*/lib/libcuda.so*", + "/usr/lib/*-linux-gnu/nvidia/current/libcuda.so*", + "/usr/lib/*-linux-gnu/libcuda.so*", + "/usr/lib/wsl/lib/libcuda.so*", + "/usr/lib/wsl/drivers/*/libcuda.so*", + "/opt/cuda/lib*/libcuda.so*", + "/usr/local/cuda/lib*/libcuda.so*", + "/usr/lib*/libcuda.so*", + "/usr/local/lib*/libcuda.so*", +} + +var OneapiGlobs = []string{ + "/usr/lib/x86_64-linux-gnu/libze_intel_gpu.so*", + "/usr/lib*/libze_intel_gpu.so*", +} + +var ( + CudartMgmtName = 
"libcudart.so*" + NvcudaMgmtName = "libcuda.so*" + NvmlMgmtName = "" // not currently wired on linux + OneapiMgmtName = "libze_intel_gpu.so*" +) + +func GetCPUMem() (memInfo, error) { + var mem memInfo + var total, available, free, buffers, cached, freeSwap uint64 + f, err := os.Open("/proc/meminfo") + if err != nil { + return mem, err + } + defer f.Close() + s := bufio.NewScanner(f) + for s.Scan() { + line := s.Text() + switch { + case strings.HasPrefix(line, "MemTotal:"): + _, err = fmt.Sscanf(line, "MemTotal:%d", &total) + case strings.HasPrefix(line, "MemAvailable:"): + _, err = fmt.Sscanf(line, "MemAvailable:%d", &available) + case strings.HasPrefix(line, "MemFree:"): + _, err = fmt.Sscanf(line, "MemFree:%d", &free) + case strings.HasPrefix(line, "Buffers:"): + _, err = fmt.Sscanf(line, "Buffers:%d", &buffers) + case strings.HasPrefix(line, "Cached:"): + _, err = fmt.Sscanf(line, "Cached:%d", &cached) + case strings.HasPrefix(line, "SwapFree:"): + _, err = fmt.Sscanf(line, "SwapFree:%d", &freeSwap) + default: + continue + } + if err != nil { + return mem, err + } + } + mem.TotalMemory = total * format.KibiByte + mem.FreeSwap = freeSwap * format.KibiByte + if available > 0 { + mem.FreeMemory = available * format.KibiByte + } else { + mem.FreeMemory = (free + buffers + cached) * format.KibiByte + } + return mem, nil +} + +const CpuInfoFilename = "/proc/cpuinfo" + +type linuxCpuInfo struct { + ID string `cpuinfo:"processor"` + VendorID string `cpuinfo:"vendor_id"` + ModelName string `cpuinfo:"model name"` + PhysicalID string `cpuinfo:"physical id"` + Siblings string `cpuinfo:"siblings"` + CoreID string `cpuinfo:"core id"` +} + +func GetCPUDetails() ([]CPU, error) { + file, err := os.Open(CpuInfoFilename) + if err != nil { + return nil, err + } + defer file.Close() + return linuxCPUDetails(file) +} + +func linuxCPUDetails(file io.Reader) ([]CPU, error) { + reColumns := regexp.MustCompile("\t+: ") + scanner := bufio.NewScanner(file) + cpuInfos := []linuxCpuInfo{} + cpu := &linuxCpuInfo{} + for scanner.Scan() { + line := scanner.Text() + if sl := reColumns.Split(line, 2); len(sl) > 1 { + t := reflect.TypeOf(cpu).Elem() + s := reflect.ValueOf(cpu).Elem() + for i := range t.NumField() { + field := t.Field(i) + tag := field.Tag.Get("cpuinfo") + if tag == sl[0] { + s.FieldByName(field.Name).SetString(sl[1]) + break + } + } + } else if strings.TrimSpace(line) == "" && cpu.ID != "" { + cpuInfos = append(cpuInfos, *cpu) + cpu = &linuxCpuInfo{} + } + } + if cpu.ID != "" { + cpuInfos = append(cpuInfos, *cpu) + } + + // Process the sockets/cores/threads + socketByID := map[string]*CPU{} + coreBySocket := map[string]map[string]struct{}{} + threadsByCoreBySocket := map[string]map[string]int{} + for _, c := range cpuInfos { + if _, found := socketByID[c.PhysicalID]; !found { + socketByID[c.PhysicalID] = &CPU{ + ID: c.PhysicalID, + VendorID: c.VendorID, + ModelName: c.ModelName, + } + coreBySocket[c.PhysicalID] = map[string]struct{}{} + threadsByCoreBySocket[c.PhysicalID] = map[string]int{} + } + if c.CoreID != "" { + coreBySocket[c.PhysicalID][c.PhysicalID+":"+c.CoreID] = struct{}{} + threadsByCoreBySocket[c.PhysicalID][c.PhysicalID+":"+c.CoreID]++ + } else { + coreBySocket[c.PhysicalID][c.PhysicalID+":"+c.ID] = struct{}{} + threadsByCoreBySocket[c.PhysicalID][c.PhysicalID+":"+c.ID]++ + } + } + + // Tally up the values from the tracking maps + for id, s := range socketByID { + s.CoreCount = len(coreBySocket[id]) + s.ThreadCount = 0 + + // This only works if HT is enabled, consider a more reliable model, 
maybe cache size comparisons? + efficiencyCoreCount := 0 + for _, threads := range threadsByCoreBySocket[id] { + s.ThreadCount += threads + if threads == 1 { + efficiencyCoreCount++ + } + } + if efficiencyCoreCount == s.CoreCount { + // 1:1 mapping means they're not actually efficiency cores, but regular cores + s.EfficiencyCoreCount = 0 + } else { + s.EfficiencyCoreCount = efficiencyCoreCount + } + } + keys := make([]string, 0, len(socketByID)) + result := make([]CPU, 0, len(socketByID)) + for k := range socketByID { + keys = append(keys, k) + } + sort.Strings(keys) + for _, k := range keys { + result = append(result, *socketByID[k]) + } + return result, nil +} diff --git a/discover/gpu_linux_test.go b/discover/gpu_linux_test.go new file mode 100644 index 0000000..c4d64e3 --- /dev/null +++ b/discover/gpu_linux_test.go @@ -0,0 +1,2097 @@ +package discover + +import ( + "bytes" + "log/slog" + "testing" +) + +func TestLinuxCPUDetails(t *testing.T) { + type results struct { + cores int + efficiency int + threads int + } + type testCase struct { + input string + expCPUs []results + expThreadCount int + } + testCases := map[string]*testCase{ + "#5554 Docker Ollama container inside the LXC": { + input: `processor : 0 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2246.624 +cache size : 512 KB +physical id : 0 +siblings : 4 +core id : 0 +cpu cores : 4 +apicid : 0 +initial apicid : 0 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm rep_good nopl cpuid extd_apicid tsc_known_freq pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand hypervisor lahf_lm cmp_legacy svm cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw perfctr_core invpcid_single ssbd ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves avx512_bf16 clzero xsaveerptr wbnoinvd arat npt lbrv nrip_save tsc_scale vmcb_clean flushbyasid pausefilter pfthreshold v_vmsave_vmload vgif avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid fsrm flush_l1d arch_capabilities +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4493.24 +TLB size : 1024 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: + +processor : 1 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2246.624 +cache size : 512 KB +physical id : 0 +siblings : 4 +core id : 1 +cpu cores : 4 +apicid : 1 +initial apicid : 1 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm rep_good nopl cpuid extd_apicid tsc_known_freq pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand hypervisor lahf_lm cmp_legacy svm cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw perfctr_core invpcid_single ssbd ibrs ibpb stibp ibrs_enhanced vmmcall 
fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves avx512_bf16 clzero xsaveerptr wbnoinvd arat npt lbrv nrip_save tsc_scale vmcb_clean flushbyasid pausefilter pfthreshold v_vmsave_vmload vgif avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid fsrm flush_l1d arch_capabilities +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4493.24 +TLB size : 1024 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: + +processor : 2 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2246.624 +cache size : 512 KB +physical id : 0 +siblings : 4 +core id : 2 +cpu cores : 4 +apicid : 2 +initial apicid : 2 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm rep_good nopl cpuid extd_apicid tsc_known_freq pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand hypervisor lahf_lm cmp_legacy svm cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw perfctr_core invpcid_single ssbd ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves avx512_bf16 clzero xsaveerptr wbnoinvd arat npt lbrv nrip_save tsc_scale vmcb_clean flushbyasid pausefilter pfthreshold v_vmsave_vmload vgif avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid fsrm flush_l1d arch_capabilities +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4493.24 +TLB size : 1024 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: + +processor : 3 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2246.624 +cache size : 512 KB +physical id : 0 +siblings : 4 +core id : 3 +cpu cores : 4 +apicid : 3 +initial apicid : 3 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm rep_good nopl cpuid extd_apicid tsc_known_freq pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand hypervisor lahf_lm cmp_legacy svm cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw perfctr_core invpcid_single ssbd ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves avx512_bf16 clzero xsaveerptr wbnoinvd arat npt lbrv nrip_save tsc_scale vmcb_clean flushbyasid pausefilter pfthreshold v_vmsave_vmload vgif avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid fsrm flush_l1d arch_capabilities +bugs : sysret_ss_attrs spectre_v1 spectre_v2 
spec_store_bypass srso +bogomips : 4493.24 +TLB size : 1024 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: + +processor : 4 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2246.624 +cache size : 512 KB +physical id : 1 +siblings : 4 +core id : 0 +cpu cores : 4 +apicid : 4 +initial apicid : 4 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm rep_good nopl cpuid extd_apicid tsc_known_freq pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand hypervisor lahf_lm cmp_legacy svm cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw perfctr_core invpcid_single ssbd ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves avx512_bf16 clzero xsaveerptr wbnoinvd arat npt lbrv nrip_save tsc_scale vmcb_clean flushbyasid pausefilter pfthreshold v_vmsave_vmload vgif avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid fsrm flush_l1d arch_capabilities +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4493.24 +TLB size : 1024 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: + +processor : 5 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2246.624 +cache size : 512 KB +physical id : 1 +siblings : 4 +core id : 1 +cpu cores : 4 +apicid : 5 +initial apicid : 5 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm rep_good nopl cpuid extd_apicid tsc_known_freq pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand hypervisor lahf_lm cmp_legacy svm cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw perfctr_core invpcid_single ssbd ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves avx512_bf16 clzero xsaveerptr wbnoinvd arat npt lbrv nrip_save tsc_scale vmcb_clean flushbyasid pausefilter pfthreshold v_vmsave_vmload vgif avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid fsrm flush_l1d arch_capabilities +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4493.24 +TLB size : 1024 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: + +processor : 6 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2246.624 +cache size : 512 KB +physical id : 1 +siblings : 4 +core id : 2 +cpu cores : 4 +apicid : 6 +initial apicid : 6 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes 
+flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm rep_good nopl cpuid extd_apicid tsc_known_freq pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand hypervisor lahf_lm cmp_legacy svm cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw perfctr_core invpcid_single ssbd ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves avx512_bf16 clzero xsaveerptr wbnoinvd arat npt lbrv nrip_save tsc_scale vmcb_clean flushbyasid pausefilter pfthreshold v_vmsave_vmload vgif avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid fsrm flush_l1d arch_capabilities +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4493.24 +TLB size : 1024 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: + +processor : 7 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2246.624 +cache size : 512 KB +physical id : 1 +siblings : 4 +core id : 3 +cpu cores : 4 +apicid : 7 +initial apicid : 7 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm rep_good nopl cpuid extd_apicid tsc_known_freq pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand hypervisor lahf_lm cmp_legacy svm cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw perfctr_core invpcid_single ssbd ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves avx512_bf16 clzero xsaveerptr wbnoinvd arat npt lbrv nrip_save tsc_scale vmcb_clean flushbyasid pausefilter pfthreshold v_vmsave_vmload vgif avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid fsrm flush_l1d arch_capabilities +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4493.24 +TLB size : 1024 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: +`, + expCPUs: []results{ + { + cores: 4, + efficiency: 0, + threads: 4, + }, + { + cores: 4, + efficiency: 0, + threads: 4, + }, + }, + expThreadCount: 8, + }, + + // Single Socket, 8 cores + "#5554 LXC direct output": { + input: `processor : 0 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 3094.910 +cache size : 1024 KB +physical id : 0 +siblings : 256 +core id : 0 +cpu cores : 128 +apicid : 0 +initial apicid : 0 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 
x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 1 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 3094.470 +cache size : 1024 KB +physical id : 0 +siblings : 256 +core id : 1 +cpu cores : 128 +apicid : 2 +initial apicid : 2 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 2 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 3094.918 +cache size : 1024 KB +physical id : 0 +siblings : 256 +core id : 2 +cpu cores : 128 +apicid : 4 +initial apicid : 4 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good 
amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 3 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 0 +siblings : 256 +core id : 3 +cpu cores : 128 +apicid : 6 +initial apicid : 6 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 4 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 3090.662 +cache size : 1024 KB +physical id : 0 +siblings : 256 +core id : 4 +cpu cores : 128 +apicid : 8 +initial apicid : 8 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr 
pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 5 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 3093.734 +cache size : 1024 KB +physical id : 0 +siblings : 256 +core id : 5 +cpu cores : 128 +apicid : 10 +initial apicid : 10 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 6 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 0 +siblings : 256 +core id : 6 +cpu cores : 128 +apicid : 12 +initial apicid : 
12 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 7 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 0 +siblings : 256 +core id : 7 +cpu cores : 128 +apicid : 14 +initial apicid : 14 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] +`, + expCPUs: []results{ + { + cores: 8, + efficiency: 0, + threads: 8, + }, + }, + expThreadCount: 8, + }, + + // Note: this was a partial cut-and-paste missing at least 
some initial logical processor definitions + // Single Socket, 29 cores + "#5554 LXC docker container output": { + input: `processor : 483 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 19 +cpu cores : 128 +apicid : 295 +initial apicid : 295 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 484 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 20 +cpu cores : 128 +apicid : 297 +initial apicid : 297 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : 
sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 485 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 21 +cpu cores : 128 +apicid : 299 +initial apicid : 299 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 486 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 22 +cpu cores : 128 +apicid : 301 +initial apicid : 301 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni 
vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 487 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 23 +cpu cores : 128 +apicid : 303 +initial apicid : 303 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 488 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 3094.717 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 24 +cpu cores : 128 +apicid : 305 +initial apicid : 305 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid 
decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 489 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 25 +cpu cores : 128 +apicid : 307 +initial apicid : 307 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 490 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 26 +cpu cores : 128 +apicid : 309 +initial apicid : 309 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk 
avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 491 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 27 +cpu cores : 128 +apicid : 311 +initial apicid : 311 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 492 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 28 +cpu cores : 128 +apicid : 313 +initial apicid : 313 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt 
clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 493 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 29 +cpu cores : 128 +apicid : 315 +initial apicid : 315 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 494 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 30 +cpu cores : 128 +apicid : 317 +initial apicid : 317 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb 
stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 495 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 31 +cpu cores : 128 +apicid : 319 +initial apicid : 319 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 496 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 80 +cpu cores : 128 +apicid : 417 +initial apicid : 417 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw 
ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 497 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 81 +cpu cores : 128 +apicid : 419 +initial apicid : 419 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 498 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 82 +cpu cores : 128 +apicid : 421 +initial apicid : 421 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 
sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 499 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 83 +cpu cores : 128 +apicid : 423 +initial apicid : 423 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 500 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 84 +cpu cores : 128 +apicid : 425 +initial apicid : 425 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm 
constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 501 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 85 +cpu cores : 128 +apicid : 427 +initial apicid : 427 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 502 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 86 +cpu cores : 128 +apicid : 429 +initial apicid : 429 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse 
tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 503 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 87 +cpu cores : 128 +apicid : 431 +initial apicid : 431 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 504 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 88 +cpu 
cores : 128 +apicid : 433 +initial apicid : 433 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 505 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 89 +cpu cores : 128 +apicid : 435 +initial apicid : 435 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 506 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor 
+stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 90 +cpu cores : 128 +apicid : 437 +initial apicid : 437 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 507 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 91 +cpu cores : 128 +apicid : 439 +initial apicid : 439 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro 
[13] [14] + +processor : 508 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 92 +cpu cores : 128 +apicid : 441 +initial apicid : 441 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 509 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 93 +cpu cores : 128 +apicid : 443 +initial apicid : 443 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size 
: 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 510 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 94 +cpu cores : 128 +apicid : 445 +initial apicid : 445 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 511 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 1 +siblings : 256 +core id : 95 +cpu cores : 128 +apicid : 447 +initial apicid : 447 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d 
debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] +`, + expCPUs: []results{ + { + cores: 29, + efficiency: 0, + threads: 29, + }, + }, + expThreadCount: 29, + }, + + "#5554 LXC docker output": { + input: `processor : 0 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 3094.910 +cache size : 1024 KB +physical id : 0 +siblings : 256 +core id : 0 +cpu cores : 128 +apicid : 0 +initial apicid : 0 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 1 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 3094.470 +cache size : 1024 KB +physical id : 0 +siblings : 256 +core id : 1 +cpu cores : 128 +apicid : 2 +initial apicid : 2 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save 
tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 2 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 3094.918 +cache size : 1024 KB +physical id : 0 +siblings : 256 +core id : 2 +cpu cores : 128 +apicid : 4 +initial apicid : 4 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 3 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 0 +siblings : 256 +core id : 3 +cpu cores : 128 +apicid : 6 +initial apicid : 6 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total 
cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 4 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 3090.662 +cache size : 1024 KB +physical id : 0 +siblings : 256 +core id : 4 +cpu cores : 128 +apicid : 8 +initial apicid : 8 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 5 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 3093.734 +cache size : 1024 KB +physical id : 0 +siblings : 256 +core id : 5 +cpu cores : 128 +apicid : 10 +initial apicid : 10 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap 
avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 6 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 0 +siblings : 256 +core id : 6 +cpu cores : 128 +apicid : 12 +initial apicid : 12 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +processor : 7 +vendor_id : AuthenticAMD +cpu family : 25 +model : 160 +model name : AMD EPYC 9754 128-Core Processor +stepping : 2 +microcode : 0xaa00212 +cpu MHz : 2250.000 +cache size : 1024 KB +physical id : 0 +siblings : 256 +core id : 7 +cpu cores : 128 +apicid : 14 +initial apicid : 14 +fpu : yes +fpu_exception : yes +cpuid level : 16 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good amd_lbr_v2 nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba perfmon_v2 
ibrs ibpb stibp ibrs_enhanced vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local user_shstk avx512_bf16 clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin cppc arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif x2avic v_spec_ctrl vnmi avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq la57 rdpid overflow_recov succor smca fsrm flush_l1d debug_swap +bugs : sysret_ss_attrs spectre_v1 spectre_v2 spec_store_bypass srso +bogomips : 4492.85 +TLB size : 3584 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 52 bits physical, 57 bits virtual +power management: ts ttp tm hwpstate cpb eff_freq_ro [13] [14] + +`, + expCPUs: []results{ + { + cores: 8, + efficiency: 0, + threads: 8, + }, + }, + expThreadCount: 8, + }, + + // exposed as 8 sockets, each with 1 core, no hyperthreading + "#7359 VMware multi-core core VM": { + input: `processor : 0 +vendor_id : GenuineIntel +cpu family : 6 +model : 106 +model name : Intel(R) Xeon(R) Gold 6326 CPU @ 2.90GHz +stepping : 6 +microcode : 0xd0003d1 +cpu MHz : 2893.202 +cache size : 24576 KB +physical id : 0 +siblings : 1 +core id : 0 +cpu cores : 1 +apicid : 0 +initial apicid : 0 +fpu : yes +fpu_exception : yes +cpuid level : 27 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss syscall nx pdpe1gb rdtscp lm constant_tsc arch_perfmon rep_good nopl xtopology tsc_reliable nonstop_tsc cpuid tsc_known_freq pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand hypervisor lahf_lm abm 3dnowprefetch invpcid_single ssbd ibrs ibpb stibp ibrs_enhanced fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves wbnoinvd arat avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq rdpid fsrm md_clear flush_l1d arch_capabilities +bugs : spectre_v1 spectre_v2 spec_store_bypass swapgs itlb_multihit mmio_stale_data eibrs_pbrsb gds bhi +bogomips : 5786.40 +clflush size : 64 +cache_alignment : 64 +address sizes : 45 bits physical, 48 bits virtual +power management: + +processor : 1 +vendor_id : GenuineIntel +cpu family : 6 +model : 106 +model name : Intel(R) Xeon(R) Gold 6326 CPU @ 2.90GHz +stepping : 6 +microcode : 0xd0003d1 +cpu MHz : 2893.202 +cache size : 24576 KB +physical id : 2 +siblings : 1 +core id : 0 +cpu cores : 1 +apicid : 2 +initial apicid : 2 +fpu : yes +fpu_exception : yes +cpuid level : 27 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss syscall nx pdpe1gb rdtscp lm constant_tsc arch_perfmon rep_good nopl xtopology tsc_reliable nonstop_tsc cpuid tsc_known_freq pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand hypervisor lahf_lm abm 3dnowprefetch invpcid_single ssbd ibrs ibpb stibp ibrs_enhanced fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves wbnoinvd arat avx512vbmi umip pku ospke avx512_vbmi2 
gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq rdpid fsrm md_clear flush_l1d arch_capabilities +bugs : spectre_v1 spectre_v2 spec_store_bypass swapgs itlb_multihit mmio_stale_data eibrs_pbrsb gds bhi +bogomips : 5786.40 +clflush size : 64 +cache_alignment : 64 +address sizes : 45 bits physical, 48 bits virtual +power management: + +processor : 2 +vendor_id : GenuineIntel +cpu family : 6 +model : 106 +model name : Intel(R) Xeon(R) Gold 6326 CPU @ 2.90GHz +stepping : 6 +microcode : 0xd0003d1 +cpu MHz : 2893.202 +cache size : 24576 KB +physical id : 4 +siblings : 1 +core id : 0 +cpu cores : 1 +apicid : 4 +initial apicid : 4 +fpu : yes +fpu_exception : yes +cpuid level : 27 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss syscall nx pdpe1gb rdtscp lm constant_tsc arch_perfmon rep_good nopl xtopology tsc_reliable nonstop_tsc cpuid tsc_known_freq pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand hypervisor lahf_lm abm 3dnowprefetch invpcid_single ssbd ibrs ibpb stibp ibrs_enhanced fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves wbnoinvd arat avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq rdpid fsrm md_clear flush_l1d arch_capabilities +bugs : spectre_v1 spectre_v2 spec_store_bypass swapgs itlb_multihit mmio_stale_data eibrs_pbrsb gds bhi +bogomips : 5786.40 +clflush size : 64 +cache_alignment : 64 +address sizes : 45 bits physical, 48 bits virtual +power management: + +processor : 3 +vendor_id : GenuineIntel +cpu family : 6 +model : 106 +model name : Intel(R) Xeon(R) Gold 6326 CPU @ 2.90GHz +stepping : 6 +microcode : 0xd0003d1 +cpu MHz : 2893.202 +cache size : 24576 KB +physical id : 6 +siblings : 1 +core id : 0 +cpu cores : 1 +apicid : 6 +initial apicid : 6 +fpu : yes +fpu_exception : yes +cpuid level : 27 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss syscall nx pdpe1gb rdtscp lm constant_tsc arch_perfmon rep_good nopl xtopology tsc_reliable nonstop_tsc cpuid tsc_known_freq pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand hypervisor lahf_lm abm 3dnowprefetch invpcid_single ssbd ibrs ibpb stibp ibrs_enhanced fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves wbnoinvd arat avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq rdpid fsrm md_clear flush_l1d arch_capabilities +bugs : spectre_v1 spectre_v2 spec_store_bypass swapgs itlb_multihit mmio_stale_data eibrs_pbrsb gds bhi +bogomips : 5786.40 +clflush size : 64 +cache_alignment : 64 +address sizes : 45 bits physical, 48 bits virtual +power management: + +processor : 4 +vendor_id : GenuineIntel +cpu family : 6 +model : 106 +model name : Intel(R) Xeon(R) Gold 6326 CPU @ 2.90GHz +stepping : 6 +microcode : 0xd0003d1 +cpu MHz : 2893.202 +cache size : 24576 KB +physical id : 8 +siblings : 1 +core id : 0 +cpu cores : 1 +apicid : 8 +initial apicid : 8 +fpu : yes +fpu_exception : yes +cpuid level : 27 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse 
sse2 ss syscall nx pdpe1gb rdtscp lm constant_tsc arch_perfmon rep_good nopl xtopology tsc_reliable nonstop_tsc cpuid tsc_known_freq pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand hypervisor lahf_lm abm 3dnowprefetch invpcid_single ssbd ibrs ibpb stibp ibrs_enhanced fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves wbnoinvd arat avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq rdpid fsrm md_clear flush_l1d arch_capabilities +bugs : spectre_v1 spectre_v2 spec_store_bypass swapgs itlb_multihit mmio_stale_data eibrs_pbrsb gds bhi +bogomips : 5786.40 +clflush size : 64 +cache_alignment : 64 +address sizes : 45 bits physical, 48 bits virtual +power management: + +processor : 5 +vendor_id : GenuineIntel +cpu family : 6 +model : 106 +model name : Intel(R) Xeon(R) Gold 6326 CPU @ 2.90GHz +stepping : 6 +microcode : 0xd0003d1 +cpu MHz : 2893.202 +cache size : 24576 KB +physical id : 10 +siblings : 1 +core id : 0 +cpu cores : 1 +apicid : 10 +initial apicid : 10 +fpu : yes +fpu_exception : yes +cpuid level : 27 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss syscall nx pdpe1gb rdtscp lm constant_tsc arch_perfmon rep_good nopl xtopology tsc_reliable nonstop_tsc cpuid tsc_known_freq pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand hypervisor lahf_lm abm 3dnowprefetch invpcid_single ssbd ibrs ibpb stibp ibrs_enhanced fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves wbnoinvd arat avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq rdpid fsrm md_clear flush_l1d arch_capabilities +bugs : spectre_v1 spectre_v2 spec_store_bypass swapgs itlb_multihit mmio_stale_data eibrs_pbrsb gds bhi +bogomips : 5786.40 +clflush size : 64 +cache_alignment : 64 +address sizes : 45 bits physical, 48 bits virtual +power management: + +processor : 6 +vendor_id : GenuineIntel +cpu family : 6 +model : 106 +model name : Intel(R) Xeon(R) Gold 6326 CPU @ 2.90GHz +stepping : 6 +microcode : 0xd0003d1 +cpu MHz : 2893.202 +cache size : 24576 KB +physical id : 12 +siblings : 1 +core id : 0 +cpu cores : 1 +apicid : 12 +initial apicid : 12 +fpu : yes +fpu_exception : yes +cpuid level : 27 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss syscall nx pdpe1gb rdtscp lm constant_tsc arch_perfmon rep_good nopl xtopology tsc_reliable nonstop_tsc cpuid tsc_known_freq pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand hypervisor lahf_lm abm 3dnowprefetch invpcid_single ssbd ibrs ibpb stibp ibrs_enhanced fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves wbnoinvd arat avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq rdpid fsrm md_clear flush_l1d arch_capabilities +bugs : spectre_v1 spectre_v2 spec_store_bypass swapgs itlb_multihit mmio_stale_data eibrs_pbrsb gds bhi +bogomips : 5786.40 
+clflush size : 64 +cache_alignment : 64 +address sizes : 45 bits physical, 48 bits virtual +power management: + +processor : 7 +vendor_id : GenuineIntel +cpu family : 6 +model : 106 +model name : Intel(R) Xeon(R) Gold 6326 CPU @ 2.90GHz +stepping : 6 +microcode : 0xd0003d1 +cpu MHz : 2893.202 +cache size : 24576 KB +physical id : 14 +siblings : 1 +core id : 0 +cpu cores : 1 +apicid : 14 +initial apicid : 14 +fpu : yes +fpu_exception : yes +cpuid level : 27 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss syscall nx pdpe1gb rdtscp lm constant_tsc arch_perfmon rep_good nopl xtopology tsc_reliable nonstop_tsc cpuid tsc_known_freq pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand hypervisor lahf_lm abm 3dnowprefetch invpcid_single ssbd ibrs ibpb stibp ibrs_enhanced fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves wbnoinvd arat avx512vbmi umip pku ospke avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq rdpid fsrm md_clear flush_l1d arch_capabilities +bugs : spectre_v1 spectre_v2 spec_store_bypass swapgs itlb_multihit mmio_stale_data eibrs_pbrsb gds bhi +bogomips : 5786.40 +clflush size : 64 +cache_alignment : 64 +address sizes : 45 bits physical, 48 bits virtual +power management: +`, + expCPUs: []results{ + { + cores: 1, + efficiency: 0, + threads: 1, + }, + { + cores: 1, + efficiency: 0, + threads: 1, + }, + { + cores: 1, + efficiency: 0, + threads: 1, + }, + { + cores: 1, + efficiency: 0, + threads: 1, + }, + { + cores: 1, + efficiency: 0, + threads: 1, + }, + { + cores: 1, + efficiency: 0, + threads: 1, + }, + { + cores: 1, + efficiency: 0, + threads: 1, + }, + { + cores: 1, + efficiency: 0, + threads: 1, + }, + }, + expThreadCount: 8, + }, + + // Emulated dual socket setup, 2 sockets, 2 cores each, with hyperthreading + "#7287 HyperV 2 socket exposed to VM": { + input: `processor : 0 +vendor_id : AuthenticAMD +cpu family : 23 +model : 96 +model name : AMD Ryzen 3 4100 4-Core Processor +stepping : 1 +microcode : 0xffffffff +cpu MHz : 3792.747 +cache size : 512 KB +physical id : 0 +siblings : 4 +core id : 0 +cpu cores : 2 +apicid : 0 +initial apicid : 0 +fpu : yes +fpu_exception : yes +cpuid level : 13 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good nopl tsc_reliable nonstop_tsc cpuid extd_apicid aperfmperf pni pclmulqdq ssse3 fma cx16 sse4_1 sse4_2 movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm cmp_legacy cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw topoext ssbd ibrs ibpb stibp vmmcall fsgsbase bmi1 avx2 smep bmi2 rdseed adx smap clflushopt clwb sha_ni xsaveopt xsavec xgetbv1 clzero xsaveerptr rdpru arat umip rdpid +bugs : sysret_ss_attrs null_seg spectre_v1 spectre_v2 spec_store_bypass retbleed smt_rsb srso +bogomips : 7585.49 +TLB size : 3072 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 48 bits physical, 48 bits virtual +power management: + +processor : 1 +vendor_id : AuthenticAMD +cpu family : 23 +model : 96 +model name : AMD Ryzen 3 4100 4-Core Processor +stepping : 1 +microcode : 0xffffffff +cpu MHz : 3792.747 +cache size : 512 KB +physical id : 0 +siblings : 4 +core id : 0 +cpu cores : 2 +apicid : 1 +initial apicid : 1 +fpu 
: yes +fpu_exception : yes +cpuid level : 13 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good nopl tsc_reliable nonstop_tsc cpuid extd_apicid aperfmperf pni pclmulqdq ssse3 fma cx16 sse4_1 sse4_2 movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm cmp_legacy cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw topoext ssbd ibrs ibpb stibp vmmcall fsgsbase bmi1 avx2 smep bmi2 rdseed adx smap clflushopt clwb sha_ni xsaveopt xsavec xgetbv1 clzero xsaveerptr rdpru arat umip rdpid +bugs : sysret_ss_attrs null_seg spectre_v1 spectre_v2 spec_store_bypass retbleed smt_rsb srso +bogomips : 7585.49 +TLB size : 3072 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 48 bits physical, 48 bits virtual +power management: + +processor : 2 +vendor_id : AuthenticAMD +cpu family : 23 +model : 96 +model name : AMD Ryzen 3 4100 4-Core Processor +stepping : 1 +microcode : 0xffffffff +cpu MHz : 3792.747 +cache size : 512 KB +physical id : 0 +siblings : 4 +core id : 1 +cpu cores : 2 +apicid : 2 +initial apicid : 2 +fpu : yes +fpu_exception : yes +cpuid level : 13 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good nopl tsc_reliable nonstop_tsc cpuid extd_apicid aperfmperf pni pclmulqdq ssse3 fma cx16 sse4_1 sse4_2 movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm cmp_legacy cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw topoext ssbd ibrs ibpb stibp vmmcall fsgsbase bmi1 avx2 smep bmi2 rdseed adx smap clflushopt clwb sha_ni xsaveopt xsavec xgetbv1 clzero xsaveerptr rdpru arat umip rdpid +bugs : sysret_ss_attrs null_seg spectre_v1 spectre_v2 spec_store_bypass retbleed smt_rsb srso +bogomips : 7585.49 +TLB size : 3072 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 48 bits physical, 48 bits virtual +power management: + +processor : 3 +vendor_id : AuthenticAMD +cpu family : 23 +model : 96 +model name : AMD Ryzen 3 4100 4-Core Processor +stepping : 1 +microcode : 0xffffffff +cpu MHz : 3792.747 +cache size : 512 KB +physical id : 0 +siblings : 4 +core id : 1 +cpu cores : 2 +apicid : 3 +initial apicid : 3 +fpu : yes +fpu_exception : yes +cpuid level : 13 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good nopl tsc_reliable nonstop_tsc cpuid extd_apicid aperfmperf pni pclmulqdq ssse3 fma cx16 sse4_1 sse4_2 movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm cmp_legacy cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw topoext ssbd ibrs ibpb stibp vmmcall fsgsbase bmi1 avx2 smep bmi2 rdseed adx smap clflushopt clwb sha_ni xsaveopt xsavec xgetbv1 clzero xsaveerptr rdpru arat umip rdpid +bugs : sysret_ss_attrs null_seg spectre_v1 spectre_v2 spec_store_bypass retbleed smt_rsb srso +bogomips : 7585.49 +TLB size : 3072 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 48 bits physical, 48 bits virtual +power management: + +processor : 4 +vendor_id : AuthenticAMD +cpu family : 23 +model : 96 +model name : AMD Ryzen 3 4100 4-Core Processor +stepping : 1 +microcode : 0xffffffff +cpu MHz : 3792.747 +cache size : 512 KB +physical id : 1 +siblings : 4 +core id : 0 +cpu cores : 2 +apicid : 4 +initial apicid : 4 +fpu : yes +fpu_exception : yes +cpuid level : 
13 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good nopl tsc_reliable nonstop_tsc cpuid extd_apicid aperfmperf pni pclmulqdq ssse3 fma cx16 sse4_1 sse4_2 movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm cmp_legacy cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw topoext ssbd ibrs ibpb stibp vmmcall fsgsbase bmi1 avx2 smep bmi2 rdseed adx smap clflushopt clwb sha_ni xsaveopt xsavec xgetbv1 clzero xsaveerptr rdpru arat umip rdpid +bugs : sysret_ss_attrs null_seg spectre_v1 spectre_v2 spec_store_bypass retbleed smt_rsb srso +bogomips : 7634.51 +TLB size : 3072 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 48 bits physical, 48 bits virtual +power management: + +processor : 5 +vendor_id : AuthenticAMD +cpu family : 23 +model : 96 +model name : AMD Ryzen 3 4100 4-Core Processor +stepping : 1 +microcode : 0xffffffff +cpu MHz : 3792.747 +cache size : 512 KB +physical id : 1 +siblings : 4 +core id : 0 +cpu cores : 2 +apicid : 5 +initial apicid : 5 +fpu : yes +fpu_exception : yes +cpuid level : 13 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good nopl tsc_reliable nonstop_tsc cpuid extd_apicid aperfmperf pni pclmulqdq ssse3 fma cx16 sse4_1 sse4_2 movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm cmp_legacy cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw topoext ssbd ibrs ibpb stibp vmmcall fsgsbase bmi1 avx2 smep bmi2 rdseed adx smap clflushopt clwb sha_ni xsaveopt xsavec xgetbv1 clzero xsaveerptr rdpru arat umip rdpid +bugs : sysret_ss_attrs null_seg spectre_v1 spectre_v2 spec_store_bypass retbleed smt_rsb srso +bogomips : 7634.51 +TLB size : 3072 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 48 bits physical, 48 bits virtual +power management: + +processor : 6 +vendor_id : AuthenticAMD +cpu family : 23 +model : 96 +model name : AMD Ryzen 3 4100 4-Core Processor +stepping : 1 +microcode : 0xffffffff +cpu MHz : 3792.747 +cache size : 512 KB +physical id : 1 +siblings : 4 +core id : 1 +cpu cores : 2 +apicid : 6 +initial apicid : 6 +fpu : yes +fpu_exception : yes +cpuid level : 13 +wp : yes +flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good nopl tsc_reliable nonstop_tsc cpuid extd_apicid aperfmperf pni pclmulqdq ssse3 fma cx16 sse4_1 sse4_2 movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm cmp_legacy cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw topoext ssbd ibrs ibpb stibp vmmcall fsgsbase bmi1 avx2 smep bmi2 rdseed adx smap clflushopt clwb sha_ni xsaveopt xsavec xgetbv1 clzero xsaveerptr rdpru arat umip rdpid +bugs : sysret_ss_attrs null_seg spectre_v1 spectre_v2 spec_store_bypass retbleed smt_rsb srso +bogomips : 7634.51 +TLB size : 3072 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 48 bits physical, 48 bits virtual +power management: + +processor : 7 +vendor_id : AuthenticAMD +cpu family : 23 +model : 96 +model name : AMD Ryzen 3 4100 4-Core Processor +stepping : 1 +microcode : 0xffffffff +cpu MHz : 3688.684 +cache size : 512 KB +physical id : 1 +siblings : 4 +core id : 1 +cpu cores : 2 +apicid : 7 +initial apicid : 7 +fpu : yes +fpu_exception : yes +cpuid level : 13 +wp : yes +flags : fpu vme de pse tsc 
msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good nopl tsc_reliable nonstop_tsc cpuid extd_apicid aperfmperf pni pclmulqdq ssse3 fma cx16 sse4_1 sse4_2 movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm cmp_legacy cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw topoext ssbd ibrs ibpb stibp vmmcall fsgsbase bmi1 avx2 smep bmi2 rdseed adx smap clflushopt clwb sha_ni xsaveopt xsavec xgetbv1 clzero xsaveerptr rdpru arat umip rdpid +bugs : sysret_ss_attrs null_seg spectre_v1 spectre_v2 spec_store_bypass retbleed smt_rsb srso +bogomips : 7634.51 +TLB size : 3072 4K pages +clflush size : 64 +cache_alignment : 64 +address sizes : 48 bits physical, 48 bits virtual +power management: +`, + expCPUs: []results{ + { + cores: 2, + efficiency: 0, + threads: 4, + }, + { + cores: 2, + efficiency: 0, + threads: 4, + }, + }, + expThreadCount: 4, + }, + } + for k, v := range testCases { + t.Run(k, func(t *testing.T) { + buf := bytes.NewBufferString(v.input) + cpus, err := linuxCPUDetails(buf) + if err != nil { + t.Fatal(err) + } + + slog.Info("example", "scenario", k, "cpus", cpus) + si := SystemInfo{ + System: CPUInfo{ + CPUs: cpus, + }, + } + threadCount := si.GetOptimalThreadCount() + if len(v.expCPUs) != len(cpus) { + t.Fatalf("incorrect number of sockets: expected:%v got:%v", v.expCPUs, cpus) + } + for i, c := range cpus { + if c.CoreCount != v.expCPUs[i].cores { + t.Fatalf("incorrect number of cores: expected:%v got:%v", v.expCPUs[i], c) + } + if c.EfficiencyCoreCount != v.expCPUs[i].efficiency { + t.Fatalf("incorrect number of efficiency cores: expected:%v got:%v", v.expCPUs[i], c) + } + if c.ThreadCount != v.expCPUs[i].threads { + t.Fatalf("incorrect number of threads: expected:%v got:%v", v.expCPUs[i], c) + } + } + + if threadCount != v.expThreadCount { + t.Fatalf("incorrect thread count expected:%d got:%d", v.expThreadCount, threadCount) + } + }) + } +} diff --git a/discover/gpu_oneapi.go b/discover/gpu_oneapi.go new file mode 100644 index 0000000..77941f5 --- /dev/null +++ b/discover/gpu_oneapi.go @@ -0,0 +1,21 @@ +//go:build linux || windows + +package discover + +import ( + "log/slog" + "strings" +) + +func oneapiGetVisibleDevicesEnv(gpuInfo []GpuInfo) (string, string) { + ids := []string{} + for _, info := range gpuInfo { + if info.Library != "oneapi" { + // TODO shouldn't happen if things are wired correctly... 
+ slog.Debug("oneapiGetVisibleDevicesEnv skipping over non-sycl device", "library", info.Library) + continue + } + ids = append(ids, info.ID) + } + return "ONEAPI_DEVICE_SELECTOR", "level_zero:" + strings.Join(ids, ",") +} diff --git a/discover/gpu_test.go b/discover/gpu_test.go new file mode 100644 index 0000000..0c6ef7b --- /dev/null +++ b/discover/gpu_test.go @@ -0,0 +1,60 @@ +package discover + +import ( + "runtime" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestBasicGetGPUInfo(t *testing.T) { + info := GetGPUInfo() + assert.NotEmpty(t, len(info)) + assert.Contains(t, "cuda rocm cpu metal", info[0].Library) + if info[0].Library != "cpu" { + assert.Greater(t, info[0].TotalMemory, uint64(0)) + assert.Greater(t, info[0].FreeMemory, uint64(0)) + } +} + +func TestCPUMemInfo(t *testing.T) { + info, err := GetCPUMem() + require.NoError(t, err) + switch runtime.GOOS { + case "darwin": + t.Skip("CPU memory not populated on darwin") + case "linux", "windows": + assert.Greater(t, info.TotalMemory, uint64(0)) + assert.Greater(t, info.FreeMemory, uint64(0)) + default: + return + } +} + +func TestByLibrary(t *testing.T) { + type testCase struct { + input []GpuInfo + expect int + } + + testCases := map[string]*testCase{ + "empty": {input: []GpuInfo{}, expect: 0}, + "cpu": {input: []GpuInfo{{Library: "cpu"}}, expect: 1}, + "cpu + GPU": {input: []GpuInfo{{Library: "cpu"}, {Library: "cuda"}}, expect: 2}, + "cpu + 2 GPU no variant": {input: []GpuInfo{{Library: "cpu"}, {Library: "cuda"}, {Library: "cuda"}}, expect: 2}, + "cpu + 2 GPU same variant": {input: []GpuInfo{{Library: "cpu"}, {Library: "cuda", Variant: "v11"}, {Library: "cuda", Variant: "v11"}}, expect: 2}, + "cpu + 2 GPU diff variant": {input: []GpuInfo{{Library: "cpu"}, {Library: "cuda", Variant: "v11"}, {Library: "cuda", Variant: "v12"}}, expect: 3}, + } + + for k, v := range testCases { + t.Run(k, func(t *testing.T) { + resp := (GpuInfoList)(v.input).ByLibrary() + if len(resp) != v.expect { + t.Fatalf("expected length %d, got %d => %+v", v.expect, len(resp), resp) + } + }) + } +} + +// TODO - add some logic to figure out card type through other means and actually verify we got back what we expected diff --git a/discover/gpu_windows.go b/discover/gpu_windows.go new file mode 100644 index 0000000..2dc2f07 --- /dev/null +++ b/discover/gpu_windows.go @@ -0,0 +1,234 @@ +package discover + +import ( + "fmt" + "log/slog" + "syscall" + "unsafe" +) + +type MEMORYSTATUSEX struct { + length uint32 + MemoryLoad uint32 + TotalPhys uint64 + AvailPhys uint64 + TotalPageFile uint64 + AvailPageFile uint64 + TotalVirtual uint64 + AvailVirtual uint64 + AvailExtendedVirtual uint64 +} + +var ( + k32 = syscall.NewLazyDLL("kernel32.dll") + globalMemoryStatusExProc = k32.NewProc("GlobalMemoryStatusEx") + sizeofMemoryStatusEx = uint32(unsafe.Sizeof(MEMORYSTATUSEX{})) + GetLogicalProcessorInformationEx = k32.NewProc("GetLogicalProcessorInformationEx") +) + +var CudartGlobs = []string{ + "c:\\Program Files\\NVIDIA GPU Computing Toolkit\\CUDA\\v*\\bin\\cudart64_*.dll", +} + +var NvmlGlobs = []string{ + "c:\\Windows\\System32\\nvml.dll", +} + +var NvcudaGlobs = []string{ + "c:\\windows\\system*\\nvcuda.dll", +} + +var OneapiGlobs = []string{ + "c:\\Windows\\System32\\DriverStore\\FileRepository\\*\\ze_intel_gpu64.dll", +} + +var ( + CudartMgmtName = "cudart64_*.dll" + NvcudaMgmtName = "nvcuda.dll" + NvmlMgmtName = "nvml.dll" + OneapiMgmtName = "ze_intel_gpu64.dll" +) + +func GetCPUMem() (memInfo, error) { + 
memStatus := MEMORYSTATUSEX{length: sizeofMemoryStatusEx} + r1, _, err := globalMemoryStatusExProc.Call(uintptr(unsafe.Pointer(&memStatus))) + if r1 == 0 { + return memInfo{}, fmt.Errorf("GlobalMemoryStatusEx failed: %w", err) + } + return memInfo{TotalMemory: memStatus.TotalPhys, FreeMemory: memStatus.AvailPhys, FreeSwap: memStatus.AvailPageFile}, nil +} + +type LOGICAL_PROCESSOR_RELATIONSHIP uint32 + +const ( + RelationProcessorCore LOGICAL_PROCESSOR_RELATIONSHIP = iota + RelationNumaNode + RelationCache + RelationProcessorPackage + RelationGroup + RelationProcessorDie + RelationNumaNodeEx + RelationProcessorModule +) +const RelationAll LOGICAL_PROCESSOR_RELATIONSHIP = 0xffff + +type GROUP_AFFINITY struct { + Mask uintptr // KAFFINITY + Group uint16 + Reserved [3]uint16 +} + +type PROCESSOR_RELATIONSHIP struct { + Flags byte + EfficiencyClass byte + Reserved [20]byte + GroupCount uint16 + GroupMask [1]GROUP_AFFINITY // len GroupCount +} + +// Omitted unused structs: NUMA_NODE_RELATIONSHIP CACHE_RELATIONSHIP GROUP_RELATIONSHIP + +type SYSTEM_LOGICAL_PROCESSOR_INFORMATION_EX struct { + Relationship LOGICAL_PROCESSOR_RELATIONSHIP + Size uint32 + U [1]byte // Union len Size + // PROCESSOR_RELATIONSHIP + // NUMA_NODE_RELATIONSHIP + // CACHE_RELATIONSHIP + // GROUP_RELATIONSHIP +} + +func (group *GROUP_AFFINITY) IsMember(target *GROUP_AFFINITY) bool { + if group == nil || target == nil { + return false + } + return group.Mask&target.Mask != 0 +} + +type winPackage struct { + groups []*GROUP_AFFINITY + coreCount int // performance cores = coreCount - efficiencyCoreCount + efficiencyCoreCount int + threadCount int +} + +func (pkg *winPackage) IsMember(target *GROUP_AFFINITY) bool { + for _, group := range pkg.groups { + if group.IsMember(target) { + return true + } + } + return false +} + +func getLogicalProcessorInformationEx() ([]byte, error) { + buf := make([]byte, 1) + bufSize := len(buf) + ret, _, err := GetLogicalProcessorInformationEx.Call( + uintptr(RelationAll), + uintptr(unsafe.Pointer(&buf[0])), + uintptr(unsafe.Pointer(&bufSize)), + ) + if ret != 0 { + return nil, fmt.Errorf("failed to determine size info ret:%d %w", ret, err) + } + + buf = make([]byte, bufSize) + ret, _, err = GetLogicalProcessorInformationEx.Call( + uintptr(RelationAll), + uintptr(unsafe.Pointer(&buf[0])), + uintptr(unsafe.Pointer(&bufSize)), + ) + if ret == 0 { + return nil, fmt.Errorf("failed to gather processor information ret:%d buflen:%d %w", ret, bufSize, err) + } + return buf, nil +} + +func processSystemLogicalProcessorInforationList(buf []byte) []*winPackage { + var slpi *SYSTEM_LOGICAL_PROCESSOR_INFORMATION_EX + // Find all the packages first + packages := []*winPackage{} + for bufOffset := 0; bufOffset < len(buf); bufOffset += int(slpi.Size) { + slpi = (*SYSTEM_LOGICAL_PROCESSOR_INFORMATION_EX)(unsafe.Pointer(&buf[bufOffset])) + if slpi.Relationship != RelationProcessorPackage { + continue + } + pr := (*PROCESSOR_RELATIONSHIP)(unsafe.Pointer(&slpi.U[0])) + pkg := &winPackage{} + ga0 := unsafe.Pointer(&pr.GroupMask[0]) + for j := range pr.GroupCount { + gm := (*GROUP_AFFINITY)(unsafe.Pointer(uintptr(ga0) + uintptr(j)*unsafe.Sizeof(GROUP_AFFINITY{}))) + pkg.groups = append(pkg.groups, gm) + } + packages = append(packages, pkg) + } + + slog.Info("packages", "count", len(packages)) + + // To identify efficiency cores we have to compare the relative values + // Larger values are "less efficient" (aka, more performant) + var maxEfficiencyClass byte + for bufOffset := 0; bufOffset < len(buf); bufOffset += 
int(slpi.Size) { + slpi = (*SYSTEM_LOGICAL_PROCESSOR_INFORMATION_EX)(unsafe.Pointer(&buf[bufOffset])) + if slpi.Relationship != RelationProcessorCore { + continue + } + pr := (*PROCESSOR_RELATIONSHIP)(unsafe.Pointer(&slpi.U[0])) + if pr.EfficiencyClass > maxEfficiencyClass { + maxEfficiencyClass = pr.EfficiencyClass + } + } + if maxEfficiencyClass > 0 { + slog.Info("efficiency cores detected", "maxEfficiencyClass", maxEfficiencyClass) + } + + // then match up the Cores to the Packages, count up cores, threads and efficiency cores + for bufOffset := 0; bufOffset < len(buf); bufOffset += int(slpi.Size) { + slpi = (*SYSTEM_LOGICAL_PROCESSOR_INFORMATION_EX)(unsafe.Pointer(&buf[bufOffset])) + if slpi.Relationship != RelationProcessorCore { + continue + } + pr := (*PROCESSOR_RELATIONSHIP)(unsafe.Pointer(&slpi.U[0])) + ga0 := unsafe.Pointer(&pr.GroupMask[0]) + for j := range pr.GroupCount { + gm := (*GROUP_AFFINITY)(unsafe.Pointer(uintptr(ga0) + uintptr(j)*unsafe.Sizeof(GROUP_AFFINITY{}))) + for _, pkg := range packages { + if pkg.IsMember(gm) { + pkg.coreCount++ + if pr.Flags == 0 { + pkg.threadCount++ + } else { + pkg.threadCount += 2 + } + if pr.EfficiencyClass < maxEfficiencyClass { + pkg.efficiencyCoreCount++ + } + } + } + } + } + + // Summarize the results + for i, pkg := range packages { + slog.Info("", "package", i, "cores", pkg.coreCount, "efficiency", pkg.efficiencyCoreCount, "threads", pkg.threadCount) + } + + return packages +} + +func GetCPUDetails() ([]CPU, error) { + buf, err := getLogicalProcessorInformationEx() + if err != nil { + return nil, err + } + packages := processSystemLogicalProcessorInforationList(buf) + cpus := make([]CPU, len(packages)) + + for i, pkg := range packages { + cpus[i].CoreCount = pkg.coreCount + cpus[i].EfficiencyCoreCount = pkg.efficiencyCoreCount + cpus[i].ThreadCount = pkg.threadCount + } + return cpus, nil +} diff --git a/discover/gpu_windows_test.go b/discover/gpu_windows_test.go new file mode 100644 index 0000000..c4daa7b --- /dev/null +++ b/discover/gpu_windows_test.go @@ -0,0 +1,77 @@ +package discover + +import "testing" + +func TestProcessSystemLogicalProcessorInforationList(t *testing.T) { + type pkgs struct { + cores int + efficiency int + threads int + } + type testCase struct { + input []byte + expected []pkgs + } + testCases := map[string]*testCase{ + "AMD64 Family 25 Model 97 Stepping 2 ": { + input: []byte{ + 0x3, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xff, 0xff, 0xff, 0xff, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x8, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x3, 0x10, 0x40, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xff, 0xff, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x8, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x8, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc0, 0x0, 
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x8, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x8, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 
0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x8, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x8, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x8, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x8, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x3, 0x10, 0x40, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xff, 0xff, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x8, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x8, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x8, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x8, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 
0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x8, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x8, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x8, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xff, 0xff, 0xff, 0xff, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 0x0, 0x0, 0x50, 0x0, 0x0, 0x0, 0x1, 0x0, 0x1, 0x0, 0x0, 0x0, 
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x20, 0x20, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xff, 0xff, 0xff, 0xff, 0x0, 0x0, 0x0, 0x0, + }, + expected: []pkgs{ + { + cores: 16, + efficiency: 0, + threads: 32, + }, + }, + }, + "Intel64 Family 6 Model 183 Stepping 1": { + input: []byte{ + 0x3, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xff, 0xff, 0xff, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0xc, 0x40, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x20, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x3, 0xc, 0x40, 0x0, 0x0, 0x0, 0xe0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xff, 0xff, 0xff, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0xc, 0x40, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x20, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0xc, 0x40, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x20, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0xc, 0x40, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x20, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0xc, 0x40, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x20, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0xc, 0x40, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x20, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0xc, 0x40, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x20, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0xc, 0x40, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x20, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x0, 0x1, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x40, 0x0, 0x0, 
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xf, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x0, 0x1, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x4, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x0, 0x1, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x8, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x8, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x0, 0x1, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x8, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x0, 0x1, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x40, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xf0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x20, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x20, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x0, 0x1, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x20, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x40, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x40, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x0, 0x1, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x40, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x80, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x80, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x0, 0x1, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x80, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xff, 0xff, 0xff, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 0x0, 0x0, 0x50, 0x0, 0x0, 0x0, 0x1, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x18, 0x18, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xff, 0xff, 0xff, 0x0, 0x0, 0x0, 0x0, 0x0, + }, + expected: []pkgs{ + { + cores: 16, + efficiency: 8, + threads: 24, + }, + }, + }, + "dual Intel64 Family 6 Model 85 Stepping 4": { + input: []byte{ + 0x3, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 
0xff, 0xff, 0xff, 0xff, 0xff, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x3, 0xb, 0x40, 0x0, 0x0, 0x0, 0xb8, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 
0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 
0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 
0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 
0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 
0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x3, 0xb, 0x40, 0x0, 0x0, 0x0, 0xb8, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 
0x0, 0x0, 0x0, 0x1, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc, 
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 
0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 
0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 
0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x1, 0x8, 0x40, 0x0, 0x0, 0x80, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x0, 0x0, 0x38, 0x0, 0x0, 0x0, 0x2, 0x10, 0x40, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x30, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 0x0, 0x0, 0x80, 0x0, 0x0, 0x0, 0x2, 0x0, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x28, 0x28, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0, 0x0, 0x0, 0x28, 0x28, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0, 0x0, 0x0, + }, + expected: []pkgs{ + { + cores: 40, + efficiency: 0, + threads: 80, + }, { + cores: 40, + efficiency: 0, + threads: 80, + }, + }, + }, + } + + for k, v := range testCases { + t.Run(k, func(t *testing.T) { + resp := processSystemLogicalProcessorInforationList(v.input) + if len(resp) != len(v.expected) { + t.Fatalf("incorrect number of packages %d, got %d", v.expected, len(resp)) + } + for i, pkg := range v.expected { + if resp[i].coreCount != pkg.cores { + t.Fatalf("[%d] expected core count %d, got %d", i, pkg.cores, resp[i].coreCount) + } + if resp[i].efficiencyCoreCount != pkg.efficiency { + t.Fatalf("[%d] expected efficiency count %d, got %d", i, pkg.efficiency, resp[i].efficiencyCoreCount) + } + if resp[i].threadCount != pkg.threads { + t.Fatalf("[%d] expected thread count %d, got %d", i, pkg.threads, resp[i].threadCount) + } + } + }) + } +} diff --git a/discover/path.go b/discover/path.go new file mode 100644 index 0000000..8a20d8c --- /dev/null +++ b/discover/path.go @@ -0,0 +1,56 @@ +package discover + +import ( + "os" + "path/filepath" + "runtime" +) + +// LibPath is a path to lookup dynamic libraries +// in development it's usually 'build/lib/ollama' +// in distribution builds it's 'lib/ollama' on Windows +// '../lib/ollama' on Linux and the executable's directory on macOS +// note: distribution builds, additional GPU-specific libraries are +// found in subdirectories of the 
returned path, such as +// 'cuda_v11', 'cuda_v12', 'rocm', etc. +var LibOllamaPath string = func() string { + exe, err := os.Executable() + if err != nil { + return "" + } + + if eval, err := filepath.EvalSymlinks(exe); err == nil { + exe = eval + } + + var libPath string + switch runtime.GOOS { + case "windows": + libPath = filepath.Join(filepath.Dir(exe), "lib", "ollama") + case "linux": + libPath = filepath.Join(filepath.Dir(exe), "..", "lib", "ollama") + case "darwin": + libPath = filepath.Dir(exe) + } + + cwd, err := os.Getwd() + if err != nil { + return "" + } + + paths := []string{ + libPath, + + // build paths for development + filepath.Join(filepath.Dir(exe), "build", "lib", "ollama"), + filepath.Join(cwd, "build", "lib", "ollama"), + } + + for _, p := range paths { + if _, err := os.Stat(p); err == nil { + return p + } + } + + return filepath.Dir(exe) +}() diff --git a/discover/types.go b/discover/types.go new file mode 100644 index 0000000..c5212d9 --- /dev/null +++ b/discover/types.go @@ -0,0 +1,183 @@ +package discover + +import ( + "fmt" + "log/slog" + + "github.com/ollama/ollama/format" +) + +type memInfo struct { + TotalMemory uint64 `json:"total_memory,omitempty"` + FreeMemory uint64 `json:"free_memory,omitempty"` + FreeSwap uint64 `json:"free_swap,omitempty"` // TODO split this out for system only +} + +// Beginning of an `ollama info` command +type GpuInfo struct { // TODO better name maybe "InferenceProcessor"? + memInfo + Library string `json:"library,omitempty"` + + // Optional variant to select (e.g. versions, cpu feature flags) + Variant string `json:"variant"` + + // MinimumMemory represents the minimum memory required to use the GPU + MinimumMemory uint64 `json:"-"` + + // Any extra PATH/LD_LIBRARY_PATH dependencies required for the Library to operate properly + DependencyPath []string `json:"lib_path,omitempty"` + + // Extra environment variables specific to the GPU as list of [key,value] + EnvWorkarounds [][2]string `json:"envs,omitempty"` + + // Set to true if we can NOT reliably discover FreeMemory. 
A value of true indicates + // the FreeMemory is best effort, and may over or under report actual memory usage + // False indicates FreeMemory can generally be trusted on this GPU + UnreliableFreeMemory bool + + // GPU information + ID string `json:"gpu_id"` // string to use for selection of this specific GPU + Name string `json:"name"` // user friendly name if available + Compute string `json:"compute"` // Compute Capability or gfx + + // Driver Information - TODO no need to put this on each GPU + DriverMajor int `json:"driver_major,omitempty"` + DriverMinor int `json:"driver_minor,omitempty"` + + // TODO other performance capability info to help in scheduling decisions +} + +func (gpu GpuInfo) RunnerName() string { + if gpu.Variant != "" { + return gpu.Library + "_" + gpu.Variant + } + return gpu.Library +} + +type CPUInfo struct { + GpuInfo + CPUs []CPU +} + +// CPU type represents a CPU Package occupying a socket +type CPU struct { + ID string `cpuinfo:"processor"` + VendorID string `cpuinfo:"vendor_id"` + ModelName string `cpuinfo:"model name"` + CoreCount int + EfficiencyCoreCount int // Performance = CoreCount - Efficiency + ThreadCount int +} + +type CudaGPUInfo struct { + GpuInfo + OSOverhead uint64 // Memory overhead between the driver library and management library + index int //nolint:unused,nolintlint + computeMajor int //nolint:unused,nolintlint + computeMinor int //nolint:unused,nolintlint +} +type CudaGPUInfoList []CudaGPUInfo + +type RocmGPUInfo struct { + GpuInfo + usedFilepath string //nolint:unused,nolintlint + index int //nolint:unused,nolintlint +} +type RocmGPUInfoList []RocmGPUInfo + +type OneapiGPUInfo struct { + GpuInfo + driverIndex int //nolint:unused,nolintlint + gpuIndex int //nolint:unused,nolintlint +} +type OneapiGPUInfoList []OneapiGPUInfo + +type GpuInfoList []GpuInfo + +type UnsupportedGPUInfo struct { + GpuInfo + Reason string `json:"reason"` +} + +// Split up the set of gpu info's by Library and variant +func (l GpuInfoList) ByLibrary() []GpuInfoList { + resp := []GpuInfoList{} + libs := []string{} + for _, info := range l { + found := false + requested := info.Library + if info.Variant != "" { + requested += "_" + info.Variant + } + for i, lib := range libs { + if lib == requested { + resp[i] = append(resp[i], info) + found = true + break + } + } + if !found { + libs = append(libs, requested) + resp = append(resp, []GpuInfo{info}) + } + } + return resp +} + +// Report the GPU information into the log an Info level +func (l GpuInfoList) LogDetails() { + for _, g := range l { + slog.Info("inference compute", + "id", g.ID, + "library", g.Library, + "variant", g.Variant, + "compute", g.Compute, + "driver", fmt.Sprintf("%d.%d", g.DriverMajor, g.DriverMinor), + "name", g.Name, + "total", format.HumanBytes2(g.TotalMemory), + "available", format.HumanBytes2(g.FreeMemory), + ) + } +} + +// Sort by Free Space +type ByFreeMemory []GpuInfo + +func (a ByFreeMemory) Len() int { return len(a) } +func (a ByFreeMemory) Swap(i, j int) { a[i], a[j] = a[j], a[i] } +func (a ByFreeMemory) Less(i, j int) bool { return a[i].FreeMemory < a[j].FreeMemory } + +type SystemInfo struct { + System CPUInfo `json:"system"` + GPUs []GpuInfo `json:"gpus"` + UnsupportedGPUs []UnsupportedGPUInfo `json:"unsupported_gpus"` + DiscoveryErrors []string `json:"discovery_errors"` +} + +// Return the optimal number of threads to use for inference +func (si SystemInfo) GetOptimalThreadCount() int { + if len(si.System.CPUs) == 0 { + return 0 + } + + coreCount := 0 + for _, c := range 
si.System.CPUs { + coreCount += c.CoreCount - c.EfficiencyCoreCount + } + + return coreCount +} + +// For each GPU, check if it does NOT support flash attention +func (l GpuInfoList) FlashAttentionSupported() bool { + for _, gpu := range l { + supportsFA := gpu.Library == "metal" || + (gpu.Library == "cuda" && gpu.DriverMajor >= 7) || + gpu.Library == "rocm" + + if !supportsFA { + return false + } + } + return true +} diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 0000000..4d3b714 --- /dev/null +++ b/docs/README.md @@ -0,0 +1,21 @@ +# Documentation + +### Getting Started +* [Quickstart](../README.md#quickstart) +* [Examples](./examples.md) +* [Importing models](./import.md) +* [Linux Documentation](./linux.md) +* [Windows Documentation](./windows.md) +* [Docker Documentation](./docker.md) + +### Reference + +* [API Reference](./api.md) +* [Modelfile Reference](./modelfile.md) +* [OpenAI Compatibility](./openai.md) + +### Resources + +* [Troubleshooting Guide](./troubleshooting.md) +* [FAQ](./faq.md) +* [Development guide](./development.md) diff --git a/docs/api.md b/docs/api.md new file mode 100644 index 0000000..abd2761 --- /dev/null +++ b/docs/api.md @@ -0,0 +1,1629 @@ +# API + +## Endpoints + +- [Generate a completion](#generate-a-completion) +- [Generate a chat completion](#generate-a-chat-completion) +- [Create a Model](#create-a-model) +- [List Local Models](#list-local-models) +- [Show Model Information](#show-model-information) +- [Copy a Model](#copy-a-model) +- [Delete a Model](#delete-a-model) +- [Pull a Model](#pull-a-model) +- [Push a Model](#push-a-model) +- [Generate Embeddings](#generate-embeddings) +- [List Running Models](#list-running-models) +- [Version](#version) + +## Conventions + +### Model names + +Model names follow a `model:tag` format, where `model` can have an optional namespace such as `example/model`. Some examples are `orca-mini:3b-q8_0` and `llama3:70b`. The tag is optional and, if not provided, will default to `latest`. The tag is used to identify a specific version. + +### Durations + +All durations are returned in nanoseconds. + +### Streaming responses + +Certain endpoints stream responses as JSON objects. Streaming can be disabled by providing `{"stream": false}` for these endpoints. + +## Generate a completion + +``` +POST /api/generate +``` + +Generate a response for a given prompt with a provided model. This is a streaming endpoint, so there will be a series of responses. The final response object will include statistics and additional data from the request. + +### Parameters + +- `model`: (required) the [model name](#model-names) +- `prompt`: the prompt to generate a response for +- `suffix`: the text after the model response +- `images`: (optional) a list of base64-encoded images (for multimodal models such as `llava`) + +Advanced parameters (optional): + +- `format`: the format to return a response in. Format can be `json` or a JSON schema +- `options`: additional model parameters listed in the documentation for the [Modelfile](./modelfile.md#valid-parameters-and-values) such as `temperature` +- `system`: system message (overrides what is defined in the `Modelfile`) +- `template`: the prompt template to use (overrides what is defined in the `Modelfile`) +- `stream`: if `false` the response will be returned as a single response object, rather than a stream of objects +- `raw`: if `true` no formatting will be applied to the prompt.
You may choose to use the `raw` parameter if you are specifying a full templated prompt in your request to the API +- `keep_alive`: controls how long the model will stay loaded into memory following the request (default: `5m`) +- `context` (deprecated): the context parameter returned from a previous request to `/generate`, this can be used to keep a short conversational memory + +#### Structured outputs + +Structured outputs are supported by providing a JSON schema in the `format` parameter. The model will generate a response that matches the schema. See the [structured outputs](#request-structured-outputs) example below. + +#### JSON mode + +Enable JSON mode by setting the `format` parameter to `json`. This will structure the response as a valid JSON object. See the JSON mode [example](#request-json-mode) below. + +> [!IMPORTANT] +> It's important to instruct the model to use JSON in the `prompt`. Otherwise, the model may generate large amounts whitespace. + +### Examples + +#### Generate request (Streaming) + +##### Request + +```shell +curl http://localhost:11434/api/generate -d '{ + "model": "llama3.2", + "prompt": "Why is the sky blue?" +}' +``` + +##### Response + +A stream of JSON objects is returned: + +```json +{ + "model": "llama3.2", + "created_at": "2023-08-04T08:52:19.385406455-07:00", + "response": "The", + "done": false +} +``` + +The final response in the stream also includes additional data about the generation: + +- `total_duration`: time spent generating the response +- `load_duration`: time spent in nanoseconds loading the model +- `prompt_eval_count`: number of tokens in the prompt +- `prompt_eval_duration`: time spent in nanoseconds evaluating the prompt +- `eval_count`: number of tokens in the response +- `eval_duration`: time in nanoseconds spent generating the response +- `context`: an encoding of the conversation used in this response, this can be sent in the next request to keep a conversational memory +- `response`: empty if the response was streamed, if not streamed, this will contain the full response + +To calculate how fast the response is generated in tokens per second (token/s), divide `eval_count` / `eval_duration` * `10^9`. + +```json +{ + "model": "llama3.2", + "created_at": "2023-08-04T19:22:45.499127Z", + "response": "", + "done": true, + "context": [1, 2, 3], + "total_duration": 10706818083, + "load_duration": 6338219291, + "prompt_eval_count": 26, + "prompt_eval_duration": 130079000, + "eval_count": 259, + "eval_duration": 4232710000 +} +``` + +#### Request (No streaming) + +##### Request + +A response can be received in one reply when streaming is off. 
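+
+As an unofficial sketch (standard library only, assuming a server on the default port), the same non-streaming request can also be issued from Go and decoded as a single JSON object; the equivalent curl request follows.
+
+```go
+package main
+
+import (
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"net/http"
+)
+
+func main() {
+	// "stream": false asks the server for one JSON object instead of a stream.
+	body, err := json.Marshal(map[string]any{
+		"model":  "llama3.2",
+		"prompt": "Why is the sky blue?",
+		"stream": false,
+	})
+	if err != nil {
+		panic(err)
+	}
+
+	resp, err := http.Post("http://localhost:11434/api/generate", "application/json", bytes.NewReader(body))
+	if err != nil {
+		panic(err)
+	}
+	defer resp.Body.Close()
+
+	// Decode only a few of the documented response fields.
+	var out struct {
+		Response     string `json:"response"`
+		EvalCount    int    `json:"eval_count"`
+		EvalDuration int64  `json:"eval_duration"`
+	}
+	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
+		panic(err)
+	}
+
+	fmt.Println(out.Response)
+	// tokens per second = eval_count / eval_duration * 10^9
+	if out.EvalDuration > 0 {
+		fmt.Printf("%.1f tokens/s\n", float64(out.EvalCount)/float64(out.EvalDuration)*1e9)
+	}
+}
+```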
+ +```shell +curl http://localhost:11434/api/generate -d '{ + "model": "llama3.2", + "prompt": "Why is the sky blue?", + "stream": false +}' +``` + +##### Response + +If `stream` is set to `false`, the response will be a single JSON object: + +```json +{ + "model": "llama3.2", + "created_at": "2023-08-04T19:22:45.499127Z", + "response": "The sky is blue because it is the color of the sky.", + "done": true, + "context": [1, 2, 3], + "total_duration": 5043500667, + "load_duration": 5025959, + "prompt_eval_count": 26, + "prompt_eval_duration": 325953000, + "eval_count": 290, + "eval_duration": 4709213000 +} +``` + +#### Request (with suffix) + +##### Request + +```shell +curl http://localhost:11434/api/generate -d '{ + "model": "codellama:code", + "prompt": "def compute_gcd(a, b):", + "suffix": " return result", + "options": { + "temperature": 0 + }, + "stream": false +}' +``` + +##### Response + +```json5 +{ + "model": "codellama:code", + "created_at": "2024-07-22T20:47:51.147561Z", + "response": "\n if a == 0:\n return b\n else:\n return compute_gcd(b % a, a)\n\ndef compute_lcm(a, b):\n result = (a * b) / compute_gcd(a, b)\n", + "done": true, + "done_reason": "stop", + "context": [...], + "total_duration": 1162761250, + "load_duration": 6683708, + "prompt_eval_count": 17, + "prompt_eval_duration": 201222000, + "eval_count": 63, + "eval_duration": 953997000 +} +``` + +#### Request (Structured outputs) + +##### Request + +```shell +curl -X POST http://localhost:11434/api/generate -H "Content-Type: application/json" -d '{ + "model": "llama3.1:8b", + "prompt": "Ollama is 22 years old and is busy saving the world. Respond using JSON", + "stream": false, + "format": { + "type": "object", + "properties": { + "age": { + "type": "integer" + }, + "available": { + "type": "boolean" + } + }, + "required": [ + "age", + "available" + ] + } +}' +``` + +##### Response + +```json +{ + "model": "llama3.1:8b", + "created_at": "2024-12-06T00:48:09.983619Z", + "response": "{\n \"age\": 22,\n \"available\": true\n}", + "done": true, + "done_reason": "stop", + "context": [1, 2, 3], + "total_duration": 1075509083, + "load_duration": 567678166, + "prompt_eval_count": 28, + "prompt_eval_duration": 236000000, + "eval_count": 16, + "eval_duration": 269000000 +} +``` + +#### Request (JSON mode) + +> [!IMPORTANT] +> When `format` is set to `json`, the output will always be a well-formed JSON object. It's important to also instruct the model to respond in JSON. + +##### Request + +```shell +curl http://localhost:11434/api/generate -d '{ + "model": "llama3.2", + "prompt": "What color is the sky at different times of the day? 
Respond using JSON", + "format": "json", + "stream": false +}' +``` + +##### Response + +```json +{ + "model": "llama3.2", + "created_at": "2023-11-09T21:07:55.186497Z", + "response": "{\n\"morning\": {\n\"color\": \"blue\"\n},\n\"noon\": {\n\"color\": \"blue-gray\"\n},\n\"afternoon\": {\n\"color\": \"warm gray\"\n},\n\"evening\": {\n\"color\": \"orange\"\n}\n}\n", + "done": true, + "context": [1, 2, 3], + "total_duration": 4648158584, + "load_duration": 4071084, + "prompt_eval_count": 36, + "prompt_eval_duration": 439038000, + "eval_count": 180, + "eval_duration": 4196918000 +} +``` + +The value of `response` will be a string containing JSON similar to: + +```json +{ + "morning": { + "color": "blue" + }, + "noon": { + "color": "blue-gray" + }, + "afternoon": { + "color": "warm gray" + }, + "evening": { + "color": "orange" + } +} +``` + +#### Request (with images) + +To submit images to multimodal models such as `llava` or `bakllava`, provide a list of base64-encoded `images`: + +#### Request + +```shell +curl http://localhost:11434/api/generate -d '{ + "model": "llava", + "prompt":"What is in this picture?", + "stream": false, + "images": ["iVBORw0KGgoAAAANSUhEUgAAAG0AAABmCAYAAADBPx+VAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAA3VSURBVHgB7Z27r0zdG8fX743i1bi1ikMoFMQloXRpKFFIqI7LH4BEQ+NWIkjQuSWCRIEoULk0gsK1kCBI0IhrQVT7tz/7zZo888yz1r7MnDl7z5xvsjkzs2fP3uu71nNfa7lkAsm7d++Sffv2JbNmzUqcc8m0adOSzZs3Z+/XES4ZckAWJEGWPiCxjsQNLWmQsWjRIpMseaxcuTKpG/7HP27I8P79e7dq1ars/yL4/v27S0ejqwv+cUOGEGGpKHR37tzJCEpHV9tnT58+dXXCJDdECBE2Ojrqjh071hpNECjx4cMHVycM1Uhbv359B2F79+51586daxN/+pyRkRFXKyRDAqxEp4yMlDDzXG1NPnnyJKkThoK0VFd1ELZu3TrzXKxKfW7dMBQ6bcuWLW2v0VlHjx41z717927ba22U9APcw7Nnz1oGEPeL3m3p2mTAYYnFmMOMXybPPXv2bNIPpFZr1NHn4HMw0KRBjg9NuRw95s8PEcz/6DZELQd/09C9QGq5RsmSRybqkwHGjh07OsJSsYYm3ijPpyHzoiacg35MLdDSIS/O1yM778jOTwYUkKNHWUzUWaOsylE00MyI0fcnOwIdjvtNdW/HZwNLGg+sR1kMepSNJXmIwxBZiG8tDTpEZzKg0GItNsosY8USkxDhD0Rinuiko2gfL/RbiD2LZAjU9zKQJj8RDR0vJBR1/Phx9+PHj9Z7REF4nTZkxzX4LCXHrV271qXkBAPGfP/atWvu/PnzHe4C97F48eIsRLZ9+3a3f/9+87dwP1JxaF7/3r17ba+5l4EcaVo0lj3SBq5kGTJSQmLWMjgYNei2GPT1MuMqGTDEFHzeQSP2wi/jGnkmPJ/nhccs44jvDAxpVcxnq0F6eT8h4ni/iIWpR5lPyA6ETkNXoSukvpJAD3AsXLiwpZs49+fPn5ke4j10TqYvegSfn0OnafC+Tv9ooA/JPkgQysqQNBzagXY55nO/oa1F7qvIPWkRL12WRpMWUvpVDYmxAPehxWSe8ZEXL20sadYIozfmNch4QJPAfeJgW3rNsnzphBKNJM2KKODo1rVOMRYik5ETy3ix4qWNI81qAAirizgMIc+yhTytx0JWZuNI03qsrgWlGtwjoS9XwgUhWGyhUaRZZQNNIEwCiXD16tXcAHUs79co0vSD8rrJCIW98pzvxpAWyyo3HYwqS0+H0BjStClcZJT5coMm6D2LOF8TolGJtK9fvyZpyiC5ePFi9nc/oJU4eiEP0jVoAnHa9wyJycITMP78+eMeP37sXrx44d6+fdt6f82aNdkx1pg9e3Zb5W+RSRE+n+VjksQWifvVaTKFhn5O8my63K8Qabdv33b379/PiAP//vuvW7BggZszZ072/+TJk91YgkafPn166zXB1rQHFvouAWHq9z3SEevSUerqCn2/dDCeta2jxYbr69evk4MHDyY7d+7MjhMnTiTPnz9Pfv/+nfQT2ggpO2dMF8cghuoM7Ygj5iWCqRlGFml0QC/ftGmTmzt3rmsaKDsgBSPh0/8yPeLLBihLkOKJc0jp8H8vUzcxIA1k6QJ/c78tWEyj5P3o4u9+jywNPdJi5rAH9x0KHcl4Hg570eQp3+vHXGyrmEeigzQsQsjavXt38ujRo44LQuDDhw+TW7duRS1HGgMxhNXHgflaNTOsHyKvHK5Ijo2jbFjJBQK9YwFd6RVMzfgRBmEfP37suBBm/p49e1qjEP2mwTViNRo0VJWH1deMXcNK08uUjVUu7s/zRaL+oLNxz1bpANco4npUgX4G2eFbpDFyQoQxojBCpEGSytmOH8qrH5Q9vuzD6ofQylkCUmh8DBAr+q8JCyVNtWQIidKQE9wNtLSQnS4jDSsxNHogzFuQBw4cyM61UKVsjfr3ooBkPSqqQHesUPWVtzi9/vQi1T+rJj7WiTz4Pt/l3LxUkr5P2VYZaZ4URpsE+st/dujQoaBBYokbrz/8TJNQYLSonrPS9kUaSkPeZyj1AWSj+d+VBoy1pIWVNed8P0Ll/ee5HdGRhrHhR5GGN0r4LGZBaj8oFDJitBTJzIZgFcmU0Y8ytWMZMzJOaXUSrUs5RxKnrxmbb5YXO9VGUhtpXldhEUogFr3IzIsvlpmdosVcGVGXFWp2oU9kLFL3dEkSz6NHEY1sjSRdIuDFWEhd8KxFqsRi1uM/nz9/zpxnwlESONdg6dKlbsaMGS4EHFHtjFIDHwKOo46l4TxSuxgDzi+rE2jg+BaFruOX4HXa0Nnf1lwAPufZeF8/r6zD97W
K2qFnGjBxTw5qNGPxT+5T/r7/7RawFC3j4vTp09koCxkeHjqbHJqArmH5UrFKKksnxrK7FuRIs8STfBZv+luugXZ2pR/pP9Ois4z+TiMzUUkUjD0iEi1fzX8GmXyuxUBRcaUfykV0YZnlJGKQpOiGB76x5GeWkWWJc3mOrK6S7xdND+W5N6XyaRgtWJFe13GkaZnKOsYqGdOVVVbGupsyA/l7emTLHi7vwTdirNEt0qxnzAvBFcnQF16xh/TMpUuXHDowhlA9vQVraQhkudRdzOnK+04ZSP3DUhVSP61YsaLtd/ks7ZgtPcXqPqEafHkdqa84X6aCeL7YWlv6edGFHb+ZFICPlljHhg0bKuk0CSvVznWsotRu433alNdFrqG45ejoaPCaUkWERpLXjzFL2Rpllp7PJU2a/v7Ab8N05/9t27Z16KUqoFGsxnI9EosS2niSYg9SpU6B4JgTrvVW1flt1sT+0ADIJU2maXzcUTraGCRaL1Wp9rUMk16PMom8QhruxzvZIegJjFU7LLCePfS8uaQdPny4jTTL0dbee5mYokQsXTIWNY46kuMbnt8Kmec+LGWtOVIl9cT1rCB0V8WqkjAsRwta93TbwNYoGKsUSChN44lgBNCoHLHzquYKrU6qZ8lolCIN0Rh6cP0Q3U6I6IXILYOQI513hJaSKAorFpuHXJNfVlpRtmYBk1Su1obZr5dnKAO+L10Hrj3WZW+E3qh6IszE37F6EB+68mGpvKm4eb9bFrlzrok7fvr0Kfv727dvWRmdVTJHw0qiiCUSZ6wCK+7XL/AcsgNyL74DQQ730sv78Su7+t/A36MdY0sW5o40ahslXr58aZ5HtZB8GH64m9EmMZ7FpYw4T6QnrZfgenrhFxaSiSGXtPnz57e9TkNZLvTjeqhr734CNtrK41L40sUQckmj1lGKQ0rC37x544r8eNXRpnVE3ZZY7zXo8NomiO0ZUCj2uHz58rbXoZ6gc0uA+F6ZeKS/jhRDUq8MKrTho9fEkihMmhxtBI1DxKFY9XLpVcSkfoi8JGnToZO5sU5aiDQIW716ddt7ZLYtMQlhECdBGXZZMWldY5BHm5xgAroWj4C0hbYkSc/jBmggIrXJWlZM6pSETsEPGqZOndr2uuuR5rF169a2HoHPdurUKZM4CO1WTPqaDaAd+GFGKdIQkxAn9RuEWcTRyN2KSUgiSgF5aWzPTeA/lN5rZubMmR2bE4SIC4nJoltgAV/dVefZm72AtctUCJU2CMJ327hxY9t7EHbkyJFseq+EJSY16RPo3Dkq1kkr7+q0bNmyDuLQcZBEPYmHVdOBiJyIlrRDq41YPWfXOxUysi5fvtyaj+2BpcnsUV/oSoEMOk2CQGlr4ckhBwaetBhjCwH0ZHtJROPJkyc7UjcYLDjmrH7ADTEBXFfOYmB0k9oYBOjJ8b4aOYSe7QkKcYhFlq3QYLQhSidNmtS2RATwy8YOM3EQJsUjKiaWZ+vZToUQgzhkHXudb/PW5YMHD9yZM2faPsMwoc7RciYJXbGuBqJ1UIGKKLv915jsvgtJxCZDubdXr165mzdvtr1Hz5LONA8jrUwKPqsmVesKa49S3Q4WxmRPUEYdTjgiUcfUwLx589ySJUva3oMkP6IYddq6HMS4o55xBJBUeRjzfa4Zdeg56QZ43LhxoyPo7Lf1kNt7oO8wWAbNwaYjIv5lhyS7kRf96dvm5Jah8vfvX3flyhX35cuX6HfzFHOToS1H4BenCaHvO8pr8iDuwoUL7tevX+b5ZdbBair0xkFIlFDlW4ZknEClsp/TzXyAKVOmmHWFVSbDNw1l1+4f90U6IY/q4V27dpnE9bJ+v87QEydjqx/UamVVPRG+mwkNTYN+9tjkwzEx+atCm/X9WvWtDtAb68Wy9LXa1UmvCDDIpPkyOQ5ZwSzJ4jMrvFcr0rSjOUh+GcT4LSg5ugkW1Io0/SCDQBojh0hPlaJdah+tkVYrnTZowP8iq1F1TgMBBauufyB33x1v+NWFYmT5KmppgHC+NkAgbmRkpD3yn9QIseXymoTQFGQmIOKTxiZIWpvAatenVqRVXf2nTrAWMsPnKrMZHz6bJq5jvce6QK8J1cQNgKxlJapMPdZSR64/UivS9NztpkVEdKcrs5alhhWP9NeqlfWopzhZScI6QxseegZRGeg5a8C3Re1Mfl1ScP36ddcUaMuv24iOJtz7sbUjTS4qBvKmstYJoUauiuD3k5qhyr7QdUHMeCgLa1Ear9NquemdXgmum4fvJ6w1lqsuDhNrg1qSpleJK7K3TF0Q2jSd94uSZ60kK1e3qyVpQK6PVWXp2/FC3mp6jBhKKOiY2h3gtUV64TWM6wDETRPLDfSakXmH3w8g9Jlug8ZtTt4kVF0kLUYYmCCtD/DrQ5YhMGbA9L3ucdjh0y8kOHW5gU/VEEmJTcL4Pz/f7mgoAbYkAAAAAElFTkSuQmCC"] +}' +``` + +#### Response + +```json +{ + "model": "llava", + "created_at": "2023-11-03T15:36:02.583064Z", + "response": "A happy cartoon character, which is cute and cheerful.", + "done": true, + "context": [1, 2, 3], + "total_duration": 2938432250, + "load_duration": 2559292, + "prompt_eval_count": 1, + "prompt_eval_duration": 2195557000, + "eval_count": 44, + "eval_duration": 736432000 +} +``` + +#### Request (Raw Mode) + +In some cases, you may wish to bypass the templating system and provide a full prompt. In this case, you can use the `raw` parameter to disable templating. Also note that raw mode will not return a context. + +##### Request + +```shell +curl http://localhost:11434/api/generate -d '{ + "model": "mistral", + "prompt": "[INST] why is the sky blue? 
[/INST]", + "raw": true, + "stream": false +}' +``` + +#### Request (Reproducible outputs) + +For reproducible outputs, set `seed` to a number: + +##### Request + +```shell +curl http://localhost:11434/api/generate -d '{ + "model": "mistral", + "prompt": "Why is the sky blue?", + "options": { + "seed": 123 + } +}' +``` + +##### Response + +```json +{ + "model": "mistral", + "created_at": "2023-11-03T15:36:02.583064Z", + "response": " The sky appears blue because of a phenomenon called Rayleigh scattering.", + "done": true, + "total_duration": 8493852375, + "load_duration": 6589624375, + "prompt_eval_count": 14, + "prompt_eval_duration": 119039000, + "eval_count": 110, + "eval_duration": 1779061000 +} +``` + +#### Generate request (With options) + +If you want to set custom options for the model at runtime rather than in the Modelfile, you can do so with the `options` parameter. This example sets every available option, but you can set any of them individually and omit the ones you do not want to override. + +##### Request + +```shell +curl http://localhost:11434/api/generate -d '{ + "model": "llama3.2", + "prompt": "Why is the sky blue?", + "stream": false, + "options": { + "num_keep": 5, + "seed": 42, + "num_predict": 100, + "top_k": 20, + "top_p": 0.9, + "min_p": 0.0, + "typical_p": 0.7, + "repeat_last_n": 33, + "temperature": 0.8, + "repeat_penalty": 1.2, + "presence_penalty": 1.5, + "frequency_penalty": 1.0, + "penalize_newline": true, + "stop": ["\n", "user:"], + "numa": false, + "num_ctx": 1024, + "num_batch": 2, + "num_gpu": 1, + "main_gpu": 0, + "use_mmap": true, + "num_thread": 8 + } +}' +``` + +##### Response + +```json +{ + "model": "llama3.2", + "created_at": "2023-08-04T19:22:45.499127Z", + "response": "The sky is blue because it is the color of the sky.", + "done": true, + "context": [1, 2, 3], + "total_duration": 4935886791, + "load_duration": 534986708, + "prompt_eval_count": 26, + "prompt_eval_duration": 107345000, + "eval_count": 237, + "eval_duration": 4289432000 +} +``` + +#### Load a model + +If an empty prompt is provided, the model will be loaded into memory. + +##### Request + +```shell +curl http://localhost:11434/api/generate -d '{ + "model": "llama3.2" +}' +``` + +##### Response + +A single JSON object is returned: + +```json +{ + "model": "llama3.2", + "created_at": "2023-12-18T19:52:07.071755Z", + "response": "", + "done": true +} +``` + +#### Unload a model + +If an empty prompt is provided and the `keep_alive` parameter is set to `0`, a model will be unloaded from memory. + +##### Request + +```shell +curl http://localhost:11434/api/generate -d '{ + "model": "llama3.2", + "keep_alive": 0 +}' +``` + +##### Response + +A single JSON object is returned: + +```json +{ + "model": "llama3.2", + "created_at": "2024-09-12T03:54:03.516566Z", + "response": "", + "done": true, + "done_reason": "unload" +} +``` + +## Generate a chat completion + +``` +POST /api/chat +``` + +Generate the next message in a chat with a provided model. This is a streaming endpoint, so there will be a series of responses. Streaming can be disabled using `"stream": false`. The final response object will include statistics and additional data from the request. 
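+
+Because each chunk in the stream is a standalone JSON object, a client can decode objects one at a time until it sees `"done": true`. The following unofficial Go sketch (standard library only, default port assumed, decoding just a few fields) shows one way to consume the stream; the full parameter list and curl examples follow.
+
+```go
+package main
+
+import (
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"net/http"
+)
+
+func main() {
+	body, err := json.Marshal(map[string]any{
+		"model": "llama3.2",
+		"messages": []map[string]string{
+			{"role": "user", "content": "why is the sky blue?"},
+		},
+	})
+	if err != nil {
+		panic(err)
+	}
+
+	resp, err := http.Post("http://localhost:11434/api/chat", "application/json", bytes.NewReader(body))
+	if err != nil {
+		panic(err)
+	}
+	defer resp.Body.Close()
+
+	// The body is a stream of JSON objects; decode them one at a time.
+	dec := json.NewDecoder(resp.Body)
+	for {
+		var chunk struct {
+			Message struct {
+				Content string `json:"content"`
+			} `json:"message"`
+			Done bool `json:"done"`
+		}
+		if err := dec.Decode(&chunk); err != nil {
+			break // end of stream
+		}
+		fmt.Print(chunk.Message.Content)
+		if chunk.Done {
+			fmt.Println()
+			break
+		}
+	}
+}
+```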
+ +### Parameters + +- `model`: (required) the [model name](#model-names) +- `messages`: the messages of the chat, this can be used to keep a chat memory +- `tools`: list of tools in JSON for the model to use if supported + +The `message` object has the following fields: + +- `role`: the role of the message, either `system`, `user`, `assistant`, or `tool` +- `content`: the content of the message +- `images` (optional): a list of images to include in the message (for multimodal models such as `llava`) +- `tool_calls` (optional): a list of tools in JSON that the model wants to use + +Advanced parameters (optional): + +- `format`: the format to return a response in. Format can be `json` or a JSON schema. +- `options`: additional model parameters listed in the documentation for the [Modelfile](./modelfile.md#valid-parameters-and-values) such as `temperature` +- `stream`: if `false` the response will be returned as a single response object, rather than a stream of objects +- `keep_alive`: controls how long the model will stay loaded into memory following the request (default: `5m`) + +### Structured outputs + +Structured outputs are supported by providing a JSON schema in the `format` parameter. The model will generate a response that matches the schema. See the [Chat request (Structured outputs)](#chat-request-structured-outputs) example below. + +### Examples + +#### Chat Request (Streaming) + +##### Request + +Send a chat message with a streaming response. + +```shell +curl http://localhost:11434/api/chat -d '{ + "model": "llama3.2", + "messages": [ + { + "role": "user", + "content": "why is the sky blue?" + } + ] +}' +``` + +##### Response + +A stream of JSON objects is returned: + +```json +{ + "model": "llama3.2", + "created_at": "2023-08-04T08:52:19.385406455-07:00", + "message": { + "role": "assistant", + "content": "The", + "images": null + }, + "done": false +} +``` + +Final response: + +```json +{ + "model": "llama3.2", + "created_at": "2023-08-04T19:22:45.499127Z", + "message": { + "role": "assistant", + "content": "" + }, + "done": true, + "total_duration": 4883583458, + "load_duration": 1334875, + "prompt_eval_count": 26, + "prompt_eval_duration": 342546000, + "eval_count": 282, + "eval_duration": 4535599000 +} +``` + +#### Chat request (No streaming) + +##### Request + +```shell +curl http://localhost:11434/api/chat -d '{ + "model": "llama3.2", + "messages": [ + { + "role": "user", + "content": "why is the sky blue?" + } + ], + "stream": false +}' +``` + +##### Response + +```json +{ + "model": "llama3.2", + "created_at": "2023-12-12T14:13:43.416799Z", + "message": { + "role": "assistant", + "content": "Hello! How are you today?" + }, + "done": true, + "total_duration": 5191566416, + "load_duration": 2154458, + "prompt_eval_count": 26, + "prompt_eval_duration": 383809000, + "eval_count": 298, + "eval_duration": 4799921000 +} +``` + +#### Chat request (Structured outputs) + +##### Request + +```shell +curl -X POST http://localhost:11434/api/chat -H "Content-Type: application/json" -d '{ + "model": "llama3.1", + "messages": [{"role": "user", "content": "Ollama is 22 years old and busy saving the world. 
Return a JSON object with the age and availability."}], + "stream": false, + "format": { + "type": "object", + "properties": { + "age": { + "type": "integer" + }, + "available": { + "type": "boolean" + } + }, + "required": [ + "age", + "available" + ] + }, + "options": { + "temperature": 0 + } +}' +``` + +##### Response + +```json +{ + "model": "llama3.1", + "created_at": "2024-12-06T00:46:58.265747Z", + "message": { "role": "assistant", "content": "{\"age\": 22, \"available\": false}" }, + "done_reason": "stop", + "done": true, + "total_duration": 2254970291, + "load_duration": 574751416, + "prompt_eval_count": 34, + "prompt_eval_duration": 1502000000, + "eval_count": 12, + "eval_duration": 175000000 +} +``` + +#### Chat request (With History) + +Send a chat message with a conversation history. You can use this same approach to start the conversation using multi-shot or chain-of-thought prompting. + +##### Request + +```shell +curl http://localhost:11434/api/chat -d '{ + "model": "llama3.2", + "messages": [ + { + "role": "user", + "content": "why is the sky blue?" + }, + { + "role": "assistant", + "content": "due to rayleigh scattering." + }, + { + "role": "user", + "content": "how is that different than mie scattering?" + } + ] +}' +``` + +##### Response + +A stream of JSON objects is returned: + +```json +{ + "model": "llama3.2", + "created_at": "2023-08-04T08:52:19.385406455-07:00", + "message": { + "role": "assistant", + "content": "The" + }, + "done": false +} +``` + +Final response: + +```json +{ + "model": "llama3.2", + "created_at": "2023-08-04T19:22:45.499127Z", + "done": true, + "total_duration": 8113331500, + "load_duration": 6396458, + "prompt_eval_count": 61, + "prompt_eval_duration": 398801000, + "eval_count": 468, + "eval_duration": 7701267000 +} +``` + +#### Chat request (with images) + +##### Request + +Send a chat message with images. The images should be provided as an array, with the individual images encoded in Base64. 
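+
+The long string in the request below is simply a Base64-encoded image file. As a rough illustration (the file name is hypothetical), such a string can be produced in Go like this:
+
+```go
+package main
+
+import (
+	"encoding/base64"
+	"fmt"
+	"os"
+)
+
+func main() {
+	// Read an image from disk and Base64-encode it for the "images" array.
+	data, err := os.ReadFile("picture.png") // hypothetical file name
+	if err != nil {
+		panic(err)
+	}
+	fmt.Println(base64.StdEncoding.EncodeToString(data))
+}
+```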
+ +```shell +curl http://localhost:11434/api/chat -d '{ + "model": "llava", + "messages": [ + { + "role": "user", + "content": "what is in this image?", + "images": ["iVBORw0KGgoAAAANSUhEUgAAAG0AAABmCAYAAADBPx+VAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAA3VSURBVHgB7Z27r0zdG8fX743i1bi1ikMoFMQloXRpKFFIqI7LH4BEQ+NWIkjQuSWCRIEoULk0gsK1kCBI0IhrQVT7tz/7zZo888yz1r7MnDl7z5xvsjkzs2fP3uu71nNfa7lkAsm7d++Sffv2JbNmzUqcc8m0adOSzZs3Z+/XES4ZckAWJEGWPiCxjsQNLWmQsWjRIpMseaxcuTKpG/7HP27I8P79e7dq1ars/yL4/v27S0ejqwv+cUOGEGGpKHR37tzJCEpHV9tnT58+dXXCJDdECBE2Ojrqjh071hpNECjx4cMHVycM1Uhbv359B2F79+51586daxN/+pyRkRFXKyRDAqxEp4yMlDDzXG1NPnnyJKkThoK0VFd1ELZu3TrzXKxKfW7dMBQ6bcuWLW2v0VlHjx41z717927ba22U9APcw7Nnz1oGEPeL3m3p2mTAYYnFmMOMXybPPXv2bNIPpFZr1NHn4HMw0KRBjg9NuRw95s8PEcz/6DZELQd/09C9QGq5RsmSRybqkwHGjh07OsJSsYYm3ijPpyHzoiacg35MLdDSIS/O1yM778jOTwYUkKNHWUzUWaOsylE00MyI0fcnOwIdjvtNdW/HZwNLGg+sR1kMepSNJXmIwxBZiG8tDTpEZzKg0GItNsosY8USkxDhD0Rinuiko2gfL/RbiD2LZAjU9zKQJj8RDR0vJBR1/Phx9+PHj9Z7REF4nTZkxzX4LCXHrV271qXkBAPGfP/atWvu/PnzHe4C97F48eIsRLZ9+3a3f/9+87dwP1JxaF7/3r17ba+5l4EcaVo0lj3SBq5kGTJSQmLWMjgYNei2GPT1MuMqGTDEFHzeQSP2wi/jGnkmPJ/nhccs44jvDAxpVcxnq0F6eT8h4ni/iIWpR5lPyA6ETkNXoSukvpJAD3AsXLiwpZs49+fPn5ke4j10TqYvegSfn0OnafC+Tv9ooA/JPkgQysqQNBzagXY55nO/oa1F7qvIPWkRL12WRpMWUvpVDYmxAPehxWSe8ZEXL20sadYIozfmNch4QJPAfeJgW3rNsnzphBKNJM2KKODo1rVOMRYik5ETy3ix4qWNI81qAAirizgMIc+yhTytx0JWZuNI03qsrgWlGtwjoS9XwgUhWGyhUaRZZQNNIEwCiXD16tXcAHUs79co0vSD8rrJCIW98pzvxpAWyyo3HYwqS0+H0BjStClcZJT5coMm6D2LOF8TolGJtK9fvyZpyiC5ePFi9nc/oJU4eiEP0jVoAnHa9wyJycITMP78+eMeP37sXrx44d6+fdt6f82aNdkx1pg9e3Zb5W+RSRE+n+VjksQWifvVaTKFhn5O8my63K8Qabdv33b379/PiAP//vuvW7BggZszZ072/+TJk91YgkafPn166zXB1rQHFvouAWHq9z3SEevSUerqCn2/dDCeta2jxYbr69evk4MHDyY7d+7MjhMnTiTPnz9Pfv/+nfQT2ggpO2dMF8cghuoM7Ygj5iWCqRlGFml0QC/ftGmTmzt3rmsaKDsgBSPh0/8yPeLLBihLkOKJc0jp8H8vUzcxIA1k6QJ/c78tWEyj5P3o4u9+jywNPdJi5rAH9x0KHcl4Hg570eQp3+vHXGyrmEeigzQsQsjavXt38ujRo44LQuDDhw+TW7duRS1HGgMxhNXHgflaNTOsHyKvHK5Ijo2jbFjJBQK9YwFd6RVMzfgRBmEfP37suBBm/p49e1qjEP2mwTViNRo0VJWH1deMXcNK08uUjVUu7s/zRaL+oLNxz1bpANco4npUgX4G2eFbpDFyQoQxojBCpEGSytmOH8qrH5Q9vuzD6ofQylkCUmh8DBAr+q8JCyVNtWQIidKQE9wNtLSQnS4jDSsxNHogzFuQBw4cyM61UKVsjfr3ooBkPSqqQHesUPWVtzi9/vQi1T+rJj7WiTz4Pt/l3LxUkr5P2VYZaZ4URpsE+st/dujQoaBBYokbrz/8TJNQYLSonrPS9kUaSkPeZyj1AWSj+d+VBoy1pIWVNed8P0Ll/ee5HdGRhrHhR5GGN0r4LGZBaj8oFDJitBTJzIZgFcmU0Y8ytWMZMzJOaXUSrUs5RxKnrxmbb5YXO9VGUhtpXldhEUogFr3IzIsvlpmdosVcGVGXFWp2oU9kLFL3dEkSz6NHEY1sjSRdIuDFWEhd8KxFqsRi1uM/nz9/zpxnwlESONdg6dKlbsaMGS4EHFHtjFIDHwKOo46l4TxSuxgDzi+rE2jg+BaFruOX4HXa0Nnf1lwAPufZeF8/r6zD97WK2qFnGjBxTw5qNGPxT+5T/r7/7RawFC3j4vTp09koCxkeHjqbHJqArmH5UrFKKksnxrK7FuRIs8STfBZv+luugXZ2pR/pP9Ois4z+TiMzUUkUjD0iEi1fzX8GmXyuxUBRcaUfykV0YZnlJGKQpOiGB76x5GeWkWWJc3mOrK6S7xdND+W5N6XyaRgtWJFe13GkaZnKOsYqGdOVVVbGupsyA/l7emTLHi7vwTdirNEt0qxnzAvBFcnQF16xh/TMpUuXHDowhlA9vQVraQhkudRdzOnK+04ZSP3DUhVSP61YsaLtd/ks7ZgtPcXqPqEafHkdqa84X6aCeL7YWlv6edGFHb+ZFICPlljHhg0bKuk0CSvVznWsotRu433alNdFrqG45ejoaPCaUkWERpLXjzFL2Rpllp7PJU2a/v7Ab8N05/9t27Z16KUqoFGsxnI9EosS2niSYg9SpU6B4JgTrvVW1flt1sT+0ADIJU2maXzcUTraGCRaL1Wp9rUMk16PMom8QhruxzvZIegJjFU7LLCePfS8uaQdPny4jTTL0dbee5mYokQsXTIWNY46kuMbnt8Kmec+LGWtOVIl9cT1rCB0V8WqkjAsRwta93TbwNYoGKsUSChN44lgBNCoHLHzquYKrU6qZ8lolCIN0Rh6cP0Q3U6I6IXILYOQI513hJaSKAorFpuHXJNfVlpRtmYBk1Su1obZr5dnKAO+L10Hrj3WZW+E3qh6IszE37F6EB+68mGpvKm4eb9bFrlzrok7fvr0Kfv727dvWRmdVTJHw0qiiCUSZ6wCK+7XL/AcsgNyL74DQQ730sv78Su7+t/A36MdY0sW5o40ahslXr58aZ5HtZB8GH64m9EmMZ7FpYw4T6QnrZfgenrhFxaSiSGXtPnz57e9TkNZLvTjeqhr734CNtrK41L40sUQckmj1lGKQ0rC37x544r8eNXRpnVE3ZZY7zXo8NomiO0ZUCj2uHz58rbXoZ6gc0uA
+F6ZeKS/jhRDUq8MKrTho9fEkihMmhxtBI1DxKFY9XLpVcSkfoi8JGnToZO5sU5aiDQIW716ddt7ZLYtMQlhECdBGXZZMWldY5BHm5xgAroWj4C0hbYkSc/jBmggIrXJWlZM6pSETsEPGqZOndr2uuuR5rF169a2HoHPdurUKZM4CO1WTPqaDaAd+GFGKdIQkxAn9RuEWcTRyN2KSUgiSgF5aWzPTeA/lN5rZubMmR2bE4SIC4nJoltgAV/dVefZm72AtctUCJU2CMJ327hxY9t7EHbkyJFseq+EJSY16RPo3Dkq1kkr7+q0bNmyDuLQcZBEPYmHVdOBiJyIlrRDq41YPWfXOxUysi5fvtyaj+2BpcnsUV/oSoEMOk2CQGlr4ckhBwaetBhjCwH0ZHtJROPJkyc7UjcYLDjmrH7ADTEBXFfOYmB0k9oYBOjJ8b4aOYSe7QkKcYhFlq3QYLQhSidNmtS2RATwy8YOM3EQJsUjKiaWZ+vZToUQgzhkHXudb/PW5YMHD9yZM2faPsMwoc7RciYJXbGuBqJ1UIGKKLv915jsvgtJxCZDubdXr165mzdvtr1Hz5LONA8jrUwKPqsmVesKa49S3Q4WxmRPUEYdTjgiUcfUwLx589ySJUva3oMkP6IYddq6HMS4o55xBJBUeRjzfa4Zdeg56QZ43LhxoyPo7Lf1kNt7oO8wWAbNwaYjIv5lhyS7kRf96dvm5Jah8vfvX3flyhX35cuX6HfzFHOToS1H4BenCaHvO8pr8iDuwoUL7tevX+b5ZdbBair0xkFIlFDlW4ZknEClsp/TzXyAKVOmmHWFVSbDNw1l1+4f90U6IY/q4V27dpnE9bJ+v87QEydjqx/UamVVPRG+mwkNTYN+9tjkwzEx+atCm/X9WvWtDtAb68Wy9LXa1UmvCDDIpPkyOQ5ZwSzJ4jMrvFcr0rSjOUh+GcT4LSg5ugkW1Io0/SCDQBojh0hPlaJdah+tkVYrnTZowP8iq1F1TgMBBauufyB33x1v+NWFYmT5KmppgHC+NkAgbmRkpD3yn9QIseXymoTQFGQmIOKTxiZIWpvAatenVqRVXf2nTrAWMsPnKrMZHz6bJq5jvce6QK8J1cQNgKxlJapMPdZSR64/UivS9NztpkVEdKcrs5alhhWP9NeqlfWopzhZScI6QxseegZRGeg5a8C3Re1Mfl1ScP36ddcUaMuv24iOJtz7sbUjTS4qBvKmstYJoUauiuD3k5qhyr7QdUHMeCgLa1Ear9NquemdXgmum4fvJ6w1lqsuDhNrg1qSpleJK7K3TF0Q2jSd94uSZ60kK1e3qyVpQK6PVWXp2/FC3mp6jBhKKOiY2h3gtUV64TWM6wDETRPLDfSakXmH3w8g9Jlug8ZtTt4kVF0kLUYYmCCtD/DrQ5YhMGbA9L3ucdjh0y8kOHW5gU/VEEmJTcL4Pz/f7mgoAbYkAAAAAElFTkSuQmCC"] + } + ] +}' +``` + +##### Response + +```json +{ + "model": "llava", + "created_at": "2023-12-13T22:42:50.203334Z", + "message": { + "role": "assistant", + "content": " The image features a cute, little pig with an angry facial expression. It's wearing a heart on its shirt and is waving in the air. This scene appears to be part of a drawing or sketching project.", + "images": null + }, + "done": true, + "total_duration": 1668506709, + "load_duration": 1986209, + "prompt_eval_count": 26, + "prompt_eval_duration": 359682000, + "eval_count": 83, + "eval_duration": 1303285000 +} +``` + +#### Chat request (Reproducible outputs) + +##### Request + +```shell +curl http://localhost:11434/api/chat -d '{ + "model": "llama3.2", + "messages": [ + { + "role": "user", + "content": "Hello!" + } + ], + "options": { + "seed": 101, + "temperature": 0 + } +}' +``` + +##### Response + +```json +{ + "model": "llama3.2", + "created_at": "2023-12-12T14:13:43.416799Z", + "message": { + "role": "assistant", + "content": "Hello! How are you today?" + }, + "done": true, + "total_duration": 5191566416, + "load_duration": 2154458, + "prompt_eval_count": 26, + "prompt_eval_duration": 383809000, + "eval_count": 298, + "eval_duration": 4799921000 +} +``` + +#### Chat request (with tools) + +##### Request + +```shell +curl http://localhost:11434/api/chat -d '{ + "model": "llama3.2", + "messages": [ + { + "role": "user", + "content": "What is the weather today in Paris?" + } + ], + "stream": false, + "tools": [ + { + "type": "function", + "function": { + "name": "get_current_weather", + "description": "Get the current weather for a location", + "parameters": { + "type": "object", + "properties": { + "location": { + "type": "string", + "description": "The location to get the weather for, e.g. San Francisco, CA" + }, + "format": { + "type": "string", + "description": "The format to return the weather in, e.g. 
'celsius' or 'fahrenheit'", + "enum": ["celsius", "fahrenheit"] + } + }, + "required": ["location", "format"] + } + } + } + ] +}' +``` + +##### Response + +```json +{ + "model": "llama3.2", + "created_at": "2024-07-22T20:33:28.123648Z", + "message": { + "role": "assistant", + "content": "", + "tool_calls": [ + { + "function": { + "name": "get_current_weather", + "arguments": { + "format": "celsius", + "location": "Paris, FR" + } + } + } + ] + }, + "done_reason": "stop", + "done": true, + "total_duration": 885095291, + "load_duration": 3753500, + "prompt_eval_count": 122, + "prompt_eval_duration": 328493000, + "eval_count": 33, + "eval_duration": 552222000 +} +``` + +#### Load a model + +If the messages array is empty, the model will be loaded into memory. + +##### Request + +```shell +curl http://localhost:11434/api/chat -d '{ + "model": "llama3.2", + "messages": [] +}' +``` + +##### Response + +```json +{ + "model": "llama3.2", + "created_at":"2024-09-12T21:17:29.110811Z", + "message": { + "role": "assistant", + "content": "" + }, + "done_reason": "load", + "done": true +} +``` + +#### Unload a model + +If the messages array is empty and the `keep_alive` parameter is set to `0`, a model will be unloaded from memory. + +##### Request + +```shell +curl http://localhost:11434/api/chat -d '{ + "model": "llama3.2", + "messages": [], + "keep_alive": 0 +}' +``` + +##### Response + +A single JSON object is returned: + +```json +{ + "model": "llama3.2", + "created_at":"2024-09-12T21:33:17.547535Z", + "message": { + "role": "assistant", + "content": "" + }, + "done_reason": "unload", + "done": true +} +``` + +## Create a Model + +``` +POST /api/create +``` + +Create a model from: + * another model; + * a safetensors directory; or + * a GGUF file. + +If you are creating a model from a safetensors directory or from a GGUF file, you must [create a blob](#create-a-blob) for each of the files and then use the file name and SHA256 digest associated with each blob in the `files` field. + +### Parameters + +- `model`: name of the model to create +- `from`: (optional) name of an existing model to create the new model from +- `files`: (optional) a dictionary of file names to SHA256 digests of blobs to create the model from +- `adapters`: (optional) a dictionary of file names to SHA256 digests of blobs for LORA adapters +- `template`: (optional) the prompt template for the model +- `license`: (optional) a string or list of strings containing the license or licenses for the model +- `system`: (optional) a string containing the system prompt for the model +- `parameters`: (optional) a dictionary of parameters for the model (see [Modelfile](./modelfile.md#valid-parameters-and-values) for a list of parameters) +- `messages`: (optional) a list of message objects used to create a conversation +- `stream`: (optional) if `false` the response will be returned as a single response object, rather than a stream of objects +- `quantize` (optional): quantize a non-quantized (e.g. float16) model + +#### Quantization types + +| Type | Recommended | +| --- | :-: | +| q4_K_M | * | +| q4_K_S | | +| q8_0 | * | + +### Examples + +#### Create a new model + +Create a new model from an existing model. + +##### Request + +```shell +curl http://localhost:11434/api/create -d '{ + "model": "mario", + "from": "llama3.2", + "system": "You are Mario from Super Mario Bros." 
+}' +``` + +##### Response + +A stream of JSON objects is returned: + +```json +{"status":"reading model metadata"} +{"status":"creating system layer"} +{"status":"using already created layer sha256:22f7f8ef5f4c791c1b03d7eb414399294764d7cc82c7e94aa81a1feb80a983a2"} +{"status":"using already created layer sha256:8c17c2ebb0ea011be9981cc3922db8ca8fa61e828c5d3f44cb6ae342bf80460b"} +{"status":"using already created layer sha256:7c23fb36d80141c4ab8cdbb61ee4790102ebd2bf7aeff414453177d4f2110e5d"} +{"status":"using already created layer sha256:2e0493f67d0c8c9c68a8aeacdf6a38a2151cb3c4c1d42accf296e19810527988"} +{"status":"using already created layer sha256:2759286baa875dc22de5394b4a925701b1896a7e3f8e53275c36f75a877a82c9"} +{"status":"writing layer sha256:df30045fe90f0d750db82a058109cecd6d4de9c90a3d75b19c09e5f64580bb42"} +{"status":"writing layer sha256:f18a68eb09bf925bb1b669490407c1b1251c5db98dc4d3d81f3088498ea55690"} +{"status":"writing manifest"} +{"status":"success"} +``` + +#### Quantize a model + +Quantize a non-quantized model. + +##### Request + +```shell +curl http://localhost:11434/api/create -d '{ + "model": "llama3.2:quantized", + "from": "llama3.2:3b-instruct-fp16", + "quantize": "q4_K_M" +}' +``` + +##### Response + +A stream of JSON objects is returned: + +```json +{"status":"quantizing F16 model to Q4_K_M","digest":"0","total":6433687776,"completed":12302} +{"status":"quantizing F16 model to Q4_K_M","digest":"0","total":6433687776,"completed":6433687552} +{"status":"verifying conversion"} +{"status":"creating new layer sha256:fb7f4f211b89c6c4928ff4ddb73db9f9c0cfca3e000c3e40d6cf27ddc6ca72eb"} +{"status":"using existing layer sha256:966de95ca8a62200913e3f8bfbf84c8494536f1b94b49166851e76644e966396"} +{"status":"using existing layer sha256:fcc5a6bec9daf9b561a68827b67ab6088e1dba9d1fa2a50d7bbcc8384e0a265d"} +{"status":"using existing layer sha256:a70ff7e570d97baaf4e62ac6e6ad9975e04caa6d900d3742d37698494479e0cd"} +{"status":"using existing layer sha256:56bb8bd477a519ffa694fc449c2413c6f0e1d3b1c88fa7e3c9d88d3ae49d4dcb"} +{"status":"writing manifest"} +{"status":"success"} +``` + +#### Create a model from GGUF + +Create a model from a GGUF file. The `files` parameter should be filled out with the file name and SHA256 digest of the GGUF file you wish to use. Use [/api/blobs/:digest](#push-a-blob) to push the GGUF file to the server before calling this API. + + +##### Request + +```shell +curl http://localhost:11434/api/create -d '{ + "model": "my-gguf-model", + "files": { + "test.gguf": "sha256:432f310a77f4650a88d0fd59ecdd7cebed8d684bafea53cbff0473542964f0c3" + } +}' +``` + +##### Response + +A stream of JSON objects is returned: + +```json +{"status":"parsing GGUF"} +{"status":"using existing layer sha256:432f310a77f4650a88d0fd59ecdd7cebed8d684bafea53cbff0473542964f0c3"} +{"status":"writing manifest"} +{"status":"success"} +``` + + +#### Create a model from a Safetensors directory + +The `files` parameter should include a dictionary of files for the safetensors model which includes the file names and SHA256 digest of each file. Use [/api/blobs/:digest](#push-a-blob) to first push each of the files to the server before calling this API. Files will remain in the cache until the Ollama server is restarted. 
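+
+As a hedged sketch of that preparation step, the SHA256 digest of each file can be computed locally and the file uploaded to `/api/blobs/:digest`; the request below then references the resulting digests in the `files` field (the file path and content type here are illustrative).
+
+```go
+package main
+
+import (
+	"bytes"
+	"crypto/sha256"
+	"fmt"
+	"net/http"
+	"os"
+)
+
+// pushBlob computes the SHA256 digest of a local file and uploads it with
+// POST /api/blobs/:digest, returning the digest used in the "files" field.
+func pushBlob(path string) (string, error) {
+	data, err := os.ReadFile(path) // fine for a sketch; very large files would be streamed
+	if err != nil {
+		return "", err
+	}
+	digest := fmt.Sprintf("sha256:%x", sha256.Sum256(data))
+
+	resp, err := http.Post("http://localhost:11434/api/blobs/"+digest, "application/octet-stream", bytes.NewReader(data))
+	if err != nil {
+		return "", err
+	}
+	resp.Body.Close()
+	return digest, nil
+}
+
+func main() {
+	// Illustrative file name from a safetensors directory.
+	digest, err := pushBlob("model.safetensors")
+	if err != nil {
+		panic(err)
+	}
+	fmt.Println("model.safetensors =>", digest)
+}
+```
+
+Repeating this for each file in the directory yields the name-to-digest mapping used in the request below.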
+ +##### Request + +```shell +curl http://localhost:11434/api/create -d '{ + "model": "fred", + "files": { + "config.json": "sha256:dd3443e529fb2290423a0c65c2d633e67b419d273f170259e27297219828e389", + "generation_config.json": "sha256:88effbb63300dbbc7390143fbbdd9d9fa50587b37e8bfd16c8c90d4970a74a36", + "special_tokens_map.json": "sha256:b7455f0e8f00539108837bfa586c4fbf424e31f8717819a6798be74bef813d05", + "tokenizer.json": "sha256:bbc1904d35169c542dffbe1f7589a5994ec7426d9e5b609d07bab876f32e97ab", + "tokenizer_config.json": "sha256:24e8a6dc2547164b7002e3125f10b415105644fcf02bf9ad8b674c87b1eaaed6", + "model.safetensors": "sha256:1ff795ff6a07e6a68085d206fb84417da2f083f68391c2843cd2b8ac6df8538f" + } +}' +``` + +##### Response + +A stream of JSON objects is returned: + +```shell +{"status":"converting model"} +{"status":"creating new layer sha256:05ca5b813af4a53d2c2922933936e398958855c44ee534858fcfd830940618b6"} +{"status":"using autodetected template llama3-instruct"} +{"status":"using existing layer sha256:56bb8bd477a519ffa694fc449c2413c6f0e1d3b1c88fa7e3c9d88d3ae49d4dcb"} +{"status":"writing manifest"} +{"status":"success"} +``` + +## Check if a Blob Exists + +```shell +HEAD /api/blobs/:digest +``` + +Ensures that the file blob (Binary Large Object) used with create a model exists on the server. This checks your Ollama server and not ollama.com. + +### Query Parameters + +- `digest`: the SHA256 digest of the blob + +### Examples + +#### Request + +```shell +curl -I http://localhost:11434/api/blobs/sha256:29fdb92e57cf0827ded04ae6461b5931d01fa595843f55d36f5b275a52087dd2 +``` + +#### Response + +Return 200 OK if the blob exists, 404 Not Found if it does not. + +## Push a Blob + +``` +POST /api/blobs/:digest +``` + +Push a file to the Ollama server to create a "blob" (Binary Large Object). + +### Query Parameters + +- `digest`: the expected SHA256 digest of the file + +### Examples + +#### Request + +```shell +curl -T model.gguf -X POST http://localhost:11434/api/blobs/sha256:29fdb92e57cf0827ded04ae6461b5931d01fa595843f55d36f5b275a52087dd2 +``` + +#### Response + +Return 201 Created if the blob was successfully created, 400 Bad Request if the digest used is not expected. + +## List Local Models + +``` +GET /api/tags +``` + +List models that are available locally. + +### Examples + +#### Request + +```shell +curl http://localhost:11434/api/tags +``` + +#### Response + +A single JSON object will be returned. + +```json +{ + "models": [ + { + "name": "deepseek-r1:latest", + "model": "deepseek-r1:latest", + "modified_at": "2025-05-10T08:06:48.639712648-07:00", + "size": 4683075271, + "digest": "0a8c266910232fd3291e71e5ba1e058cc5af9d411192cf88b6d30e92b6e73163", + "details": { + "parent_model": "", + "format": "gguf", + "family": "qwen2", + "families": [ + "qwen2" + ], + "parameter_size": "7.6B", + "quantization_level": "Q4_K_M" + } + }, + { + "name": "llama3.2:latest", + "model": "llama3.2:latest", + "modified_at": "2025-05-04T17:37:44.706015396-07:00", + "size": 2019393189, + "digest": "a80c4f17acd55265feec403c7aef86be0c25983ab279d83f3bcd3abbcb5b8b72", + "details": { + "parent_model": "", + "format": "gguf", + "family": "llama", + "families": [ + "llama" + ], + "parameter_size": "3.2B", + "quantization_level": "Q4_K_M" + } + } + ] +} +``` + +## Show Model Information + +``` +POST /api/show +``` + +Show information about a model including details, modelfile, template, parameters, license, system prompt. 
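+
+As an unofficial Go sketch (standard library only, decoding just a couple of fields), the endpoint can be queried like this; the parameters and a full curl example follow.
+
+```go
+package main
+
+import (
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"net/http"
+)
+
+func main() {
+	body, err := json.Marshal(map[string]string{"model": "llava"})
+	if err != nil {
+		panic(err)
+	}
+
+	resp, err := http.Post("http://localhost:11434/api/show", "application/json", bytes.NewReader(body))
+	if err != nil {
+		panic(err)
+	}
+	defer resp.Body.Close()
+
+	// Decode only a subset of the documented fields; see the full response below.
+	var info struct {
+		Details struct {
+			Family            string `json:"family"`
+			ParameterSize     string `json:"parameter_size"`
+			QuantizationLevel string `json:"quantization_level"`
+		} `json:"details"`
+		Capabilities []string `json:"capabilities"`
+	}
+	if err := json.NewDecoder(resp.Body).Decode(&info); err != nil {
+		panic(err)
+	}
+	fmt.Println(info.Details.Family, info.Details.ParameterSize, info.Details.QuantizationLevel, info.Capabilities)
+}
+```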
+ +### Parameters + +- `model`: name of the model to show +- `verbose`: (optional) if set to `true`, returns full data for verbose response fields + +### Examples + +#### Request + +```shell +curl http://localhost:11434/api/show -d '{ + "model": "llava" +}' +``` + +#### Response + +```json5 +{ + "modelfile": "# Modelfile generated by \"ollama show\"\n# To build a new Modelfile based on this one, replace the FROM line with:\n# FROM llava:latest\n\nFROM /Users/matt/.ollama/models/blobs/sha256:200765e1283640ffbd013184bf496e261032fa75b99498a9613be4e94d63ad52\nTEMPLATE \"\"\"{{ .System }}\nUSER: {{ .Prompt }}\nASSISTANT: \"\"\"\nPARAMETER num_ctx 4096\nPARAMETER stop \"\u003c/s\u003e\"\nPARAMETER stop \"USER:\"\nPARAMETER stop \"ASSISTANT:\"", + "parameters": "num_keep 24\nstop \"<|start_header_id|>\"\nstop \"<|end_header_id|>\"\nstop \"<|eot_id|>\"", + "template": "{{ if .System }}<|start_header_id|>system<|end_header_id|>\n\n{{ .System }}<|eot_id|>{{ end }}{{ if .Prompt }}<|start_header_id|>user<|end_header_id|>\n\n{{ .Prompt }}<|eot_id|>{{ end }}<|start_header_id|>assistant<|end_header_id|>\n\n{{ .Response }}<|eot_id|>", + "details": { + "parent_model": "", + "format": "gguf", + "family": "llama", + "families": [ + "llama" + ], + "parameter_size": "8.0B", + "quantization_level": "Q4_0" + }, + "model_info": { + "general.architecture": "llama", + "general.file_type": 2, + "general.parameter_count": 8030261248, + "general.quantization_version": 2, + "llama.attention.head_count": 32, + "llama.attention.head_count_kv": 8, + "llama.attention.layer_norm_rms_epsilon": 0.00001, + "llama.block_count": 32, + "llama.context_length": 8192, + "llama.embedding_length": 4096, + "llama.feed_forward_length": 14336, + "llama.rope.dimension_count": 128, + "llama.rope.freq_base": 500000, + "llama.vocab_size": 128256, + "tokenizer.ggml.bos_token_id": 128000, + "tokenizer.ggml.eos_token_id": 128009, + "tokenizer.ggml.merges": [], // populates if `verbose=true` + "tokenizer.ggml.model": "gpt2", + "tokenizer.ggml.pre": "llama-bpe", + "tokenizer.ggml.token_type": [], // populates if `verbose=true` + "tokenizer.ggml.tokens": [] // populates if `verbose=true` + }, + "capabilities": [ + "completion", + "vision" + ], +} +``` + +## Copy a Model + +``` +POST /api/copy +``` + +Copy a model. Creates a model with another name from an existing model. + +### Examples + +#### Request + +```shell +curl http://localhost:11434/api/copy -d '{ + "source": "llama3.2", + "destination": "llama3-backup" +}' +``` + +#### Response + +Returns a 200 OK if successful, or a 404 Not Found if the source model doesn't exist. + +## Delete a Model + +``` +DELETE /api/delete +``` + +Delete a model and its data. + +### Parameters + +- `model`: model name to delete + +### Examples + +#### Request + +```shell +curl -X DELETE http://localhost:11434/api/delete -d '{ + "model": "llama3:13b" +}' +``` + +#### Response + +Returns a 200 OK if successful, 404 Not Found if the model to be deleted doesn't exist. + +## Pull a Model + +``` +POST /api/pull +``` + +Download a model from the ollama library. Cancelled pulls are resumed from where they left off, and multiple calls will share the same download progress. + +### Parameters + +- `model`: name of the model to pull +- `insecure`: (optional) allow insecure connections to the library. Only use this if you are pulling from your own library during development. 
+- `stream`: (optional) if `false` the response will be returned as a single response object, rather than a stream of objects + +### Examples + +#### Request + +```shell +curl http://localhost:11434/api/pull -d '{ + "model": "llama3.2" +}' +``` + +#### Response + +If `stream` is not specified, or set to `true`, a stream of JSON objects is returned: + +The first object is the manifest: + +```json +{ + "status": "pulling manifest" +} +``` + +Then there is a series of downloading responses. Until any of the download is completed, the `completed` key may not be included. The number of files to be downloaded depends on the number of layers specified in the manifest. + +```json +{ + "status": "downloading digestname", + "digest": "digestname", + "total": 2142590208, + "completed": 241970 +} +``` + +After all the files are downloaded, the final responses are: + +```json +{ + "status": "verifying sha256 digest" +} +{ + "status": "writing manifest" +} +{ + "status": "removing any unused layers" +} +{ + "status": "success" +} +``` + +if `stream` is set to false, then the response is a single JSON object: + +```json +{ + "status": "success" +} +``` + +## Push a Model + +``` +POST /api/push +``` + +Upload a model to a model library. Requires registering for ollama.ai and adding a public key first. + +### Parameters + +- `model`: name of the model to push in the form of `/:` +- `insecure`: (optional) allow insecure connections to the library. Only use this if you are pushing to your library during development. +- `stream`: (optional) if `false` the response will be returned as a single response object, rather than a stream of objects + +### Examples + +#### Request + +```shell +curl http://localhost:11434/api/push -d '{ + "model": "mattw/pygmalion:latest" +}' +``` + +#### Response + +If `stream` is not specified, or set to `true`, a stream of JSON objects is returned: + +```json +{ "status": "retrieving manifest" } +``` + +and then: + +```json +{ + "status": "starting upload", + "digest": "sha256:bc07c81de745696fdf5afca05e065818a8149fb0c77266fb584d9b2cba3711ab", + "total": 1928429856 +} +``` + +Then there is a series of uploading responses: + +```json +{ + "status": "starting upload", + "digest": "sha256:bc07c81de745696fdf5afca05e065818a8149fb0c77266fb584d9b2cba3711ab", + "total": 1928429856 +} +``` + +Finally, when the upload is complete: + +```json +{"status":"pushing manifest"} +{"status":"success"} +``` + +If `stream` is set to `false`, then the response is a single JSON object: + +```json +{ "status": "success" } +``` + +## Generate Embeddings + +``` +POST /api/embed +``` + +Generate embeddings from a model + +### Parameters + +- `model`: name of model to generate embeddings from +- `input`: text or list of text to generate embeddings for + +Advanced parameters: + +- `truncate`: truncates the end of each input to fit within context length. Returns error if `false` and context length is exceeded. Defaults to `true` +- `options`: additional model parameters listed in the documentation for the [Modelfile](./modelfile.md#valid-parameters-and-values) such as `temperature` +- `keep_alive`: controls how long the model will stay loaded into memory following the request (default: `5m`) + +### Examples + +#### Request + +```shell +curl http://localhost:11434/api/embed -d '{ + "model": "all-minilm", + "input": "Why is the sky blue?" 
+}' +``` + +#### Response + +```json +{ + "model": "all-minilm", + "embeddings": [[ + 0.010071029, -0.0017594862, 0.05007221, 0.04692972, 0.054916814, + 0.008599704, 0.105441414, -0.025878139, 0.12958129, 0.031952348 + ]], + "total_duration": 14143917, + "load_duration": 1019500, + "prompt_eval_count": 8 +} +``` + +#### Request (Multiple input) + +```shell +curl http://localhost:11434/api/embed -d '{ + "model": "all-minilm", + "input": ["Why is the sky blue?", "Why is the grass green?"] +}' +``` + +#### Response + +```json +{ + "model": "all-minilm", + "embeddings": [[ + 0.010071029, -0.0017594862, 0.05007221, 0.04692972, 0.054916814, + 0.008599704, 0.105441414, -0.025878139, 0.12958129, 0.031952348 + ],[ + -0.0098027075, 0.06042469, 0.025257962, -0.006364387, 0.07272725, + 0.017194884, 0.09032035, -0.051705178, 0.09951512, 0.09072481 + ]] +} +``` + +## List Running Models +``` +GET /api/ps +``` + +List models that are currently loaded into memory. + +#### Examples + +### Request + +```shell +curl http://localhost:11434/api/ps +``` + +#### Response + +A single JSON object will be returned. + +```json +{ + "models": [ + { + "name": "mistral:latest", + "model": "mistral:latest", + "size": 5137025024, + "digest": "2ae6f6dd7a3dd734790bbbf58b8909a606e0e7e97e94b7604e0aa7ae4490e6d8", + "details": { + "parent_model": "", + "format": "gguf", + "family": "llama", + "families": [ + "llama" + ], + "parameter_size": "7.2B", + "quantization_level": "Q4_0" + }, + "expires_at": "2024-06-04T14:38:31.83753-07:00", + "size_vram": 5137025024 + } + ] +} +``` + +## Generate Embedding + +> Note: this endpoint has been superseded by `/api/embed` + +``` +POST /api/embeddings +``` + +Generate embeddings from a model + +### Parameters + +- `model`: name of model to generate embeddings from +- `prompt`: text to generate embeddings for + +Advanced parameters: + +- `options`: additional model parameters listed in the documentation for the [Modelfile](./modelfile.md#valid-parameters-and-values) such as `temperature` +- `keep_alive`: controls how long the model will stay loaded into memory following the request (default: `5m`) + +### Examples + +#### Request + +```shell +curl http://localhost:11434/api/embeddings -d '{ + "model": "all-minilm", + "prompt": "Here is an article about llamas..." +}' +``` + +#### Response + +```json +{ + "embedding": [ + 0.5670403838157654, 0.009260174818336964, 0.23178744316101074, -0.2916173040866852, -0.8924556970596313, + 0.8785552978515625, -0.34576427936553955, 0.5742510557174683, -0.04222835972905159, -0.137906014919281 + ] +} +``` + +## Version + +``` +GET /api/version +``` + +Retrieve the Ollama version + +### Examples + +#### Request + +```shell +curl http://localhost:11434/api/version +``` + +#### Response + +```json +{ + "version": "0.5.1" +} +``` + + diff --git a/docs/benchmark.md b/docs/benchmark.md new file mode 100644 index 0000000..a7bed80 --- /dev/null +++ b/docs/benchmark.md @@ -0,0 +1,59 @@ +# Benchmark + +Go benchmark tests that measure end-to-end performance of a running Ollama server. Run these tests to evaluate model inference performance on your hardware and measure the impact of code changes. 
+ +## When to use + +Run these benchmarks when: +- Making changes to the model inference engine +- Modifying model loading/unloading logic +- Changing prompt processing or token generation code +- Implementing a new model architecture +- Testing performance across different hardware setups + +## Prerequisites +- Ollama server running locally with `ollama serve` on `127.0.0.1:11434` +## Usage and Examples + +>[!NOTE] +>All commands must be run from the root directory of the Ollama project. + +Basic syntax: +```bash +go test -bench=. ./benchmark/... -m $MODEL_NAME +``` + +Required flags: +- `-bench=.`: Run all benchmarks +- `-m`: Model name to benchmark + +Optional flags: +- `-count N`: Number of times to run the benchmark (useful for statistical analysis) +- `-timeout T`: Maximum time for the benchmark to run (e.g. "10m" for 10 minutes) + +Common usage patterns: + +Single benchmark run with a model specified: +```bash +go test -bench=. ./benchmark/... -m llama3.3 +``` + +## Output metrics + +The benchmark reports several key metrics: + +- `gen_tok/s`: Generated tokens per second +- `prompt_tok/s`: Prompt processing tokens per second +- `ttft_ms`: Time to first token in milliseconds +- `load_ms`: Model load time in milliseconds +- `gen_tokens`: Total tokens generated +- `prompt_tokens`: Total prompt tokens processed + +Each benchmark runs two scenarios: +- Cold start: Model is loaded from disk for each test +- Warm start: Model is pre-loaded in memory + +Three prompt lengths are tested for each scenario: +- Short prompt (100 tokens) +- Medium prompt (500 tokens) +- Long prompt (1000 tokens) diff --git a/docs/development.md b/docs/development.md new file mode 100644 index 0000000..cf6d91e --- /dev/null +++ b/docs/development.md @@ -0,0 +1,159 @@ +# Development + +Install prerequisites: + +- [Go](https://go.dev/doc/install) +- C/C++ Compiler e.g. Clang on macOS, [TDM-GCC](https://github.com/jmeubank/tdm-gcc/releases/latest) (Windows amd64) or [llvm-mingw](https://github.com/mstorsjo/llvm-mingw) (Windows arm64), GCC/Clang on Linux. + +Then build and run Ollama from the root directory of the repository: + +```shell +go run . serve +``` + +## macOS (Apple Silicon) + +macOS Apple Silicon supports Metal which is built-in to the Ollama binary. No additional steps are required. + +## macOS (Intel) + +Install prerequisites: + +- [CMake](https://cmake.org/download/) or `brew install cmake` + +Then, configure and build the project: + +```shell +cmake -B build +cmake --build build +``` + +Lastly, run Ollama: + +```shell +go run . serve +``` + +## Windows + +Install prerequisites: + +- [CMake](https://cmake.org/download/) +- [Visual Studio 2022](https://visualstudio.microsoft.com/downloads/) including the Native Desktop Workload +- (Optional) AMD GPU support + - [ROCm](https://rocm.docs.amd.com/en/latest/) + - [Ninja](https://github.com/ninja-build/ninja/releases) +- (Optional) NVIDIA GPU support + - [CUDA SDK](https://developer.nvidia.com/cuda-downloads?target_os=Windows&target_arch=x86_64&target_version=11&target_type=exe_network) + +Then, configure and build the project: + +```shell +cmake -B build +cmake --build build --config Release +``` + +> [!IMPORTANT] +> Building for ROCm requires additional flags: +> ``` +> cmake -B build -G Ninja -DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++ +> cmake --build build --config Release +> ``` + + +Lastly, run Ollama: + +```shell +go run . serve +``` + +## Windows (ARM) + +Windows ARM does not support additional acceleration libraries at this time. 
Do not use cmake, simply `go run` or `go build`. + +## Linux + +Install prerequisites: + +- [CMake](https://cmake.org/download/) or `sudo apt install cmake` or `sudo dnf install cmake` +- (Optional) AMD GPU support + - [ROCm](https://rocm.docs.amd.com/projects/install-on-linux/en/latest/install/quick-start.html) +- (Optional) NVIDIA GPU support + - [CUDA SDK](https://developer.nvidia.com/cuda-downloads) + +> [!IMPORTANT] +> Ensure prerequisites are in `PATH` before running CMake. + + +Then, configure and build the project: + +```shell +cmake -B build +cmake --build build +``` + +Lastly, run Ollama: + +```shell +go run . serve +``` + +## Docker + +```shell +docker build . +``` + +### ROCm + +```shell +docker build --build-arg FLAVOR=rocm . +``` + +## Running tests + +To run tests, use `go test`: + +```shell +go test ./... +``` + +> NOTE: In rare cirumstances, you may nedd to change a package using the new +> "synctest" package in go1.24. +> +> If you do not have the "synctest" package enabled, you will not see build or +> test failures resulting from your change(s), if any, locally, but CI will +> break. +> +> If you see failures in CI, you can either keep pushing changes to see if the +> CI build passes, or you can enable the "synctest" package locally to see the +> failures before pushing. +> +> To enable the "synctest" package for testing, run the following command: +> +> ```shell +> GOEXPERIMENT=synctest go test ./... +> ``` +> +> If you wish to enable synctest for all go commands, you can set the +> `GOEXPERIMENT` environment variable in your shell profile or by using: +> +> ```shell +> go env -w GOEXPERIMENT=synctest +> ``` +> +> Which will enable the "synctest" package for all go commands without needing +> to set it for all shell sessions. +> +> The synctest package is not required for production builds. + +## Library detection + +Ollama looks for acceleration libraries in the following paths relative to the `ollama` executable: + +* `./lib/ollama` (Windows) +* `../lib/ollama` (Linux) +* `.` (macOS) +* `build/lib/ollama` (for development) + +If the libraries are not found, Ollama will not run with any acceleration libraries. diff --git a/docs/docker.md b/docs/docker.md new file mode 100644 index 0000000..dce090a --- /dev/null +++ b/docs/docker.md @@ -0,0 +1,78 @@ +# Ollama Docker image + +### CPU only + +```shell +docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama +``` + +### Nvidia GPU +Install the [NVIDIA Container Toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html#installation). + +#### Install with Apt +1. Configure the repository + + ```shell + curl -fsSL https://nvidia.github.io/libnvidia-container/gpgkey \ + | sudo gpg --dearmor -o /usr/share/keyrings/nvidia-container-toolkit-keyring.gpg + curl -s -L https://nvidia.github.io/libnvidia-container/stable/deb/nvidia-container-toolkit.list \ + | sed 's#deb https://#deb [signed-by=/usr/share/keyrings/nvidia-container-toolkit-keyring.gpg] https://#g' \ + | sudo tee /etc/apt/sources.list.d/nvidia-container-toolkit.list + sudo apt-get update + ``` + +2. Install the NVIDIA Container Toolkit packages + + ```shell + sudo apt-get install -y nvidia-container-toolkit + ``` + +#### Install with Yum or Dnf +1. Configure the repository + + ```shell + curl -s -L https://nvidia.github.io/libnvidia-container/stable/rpm/nvidia-container-toolkit.repo \ + | sudo tee /etc/yum.repos.d/nvidia-container-toolkit.repo + ``` + +2. 
Install the NVIDIA Container Toolkit packages + + ```shell + sudo yum install -y nvidia-container-toolkit + ``` + +#### Configure Docker to use Nvidia driver + +```shell +sudo nvidia-ctk runtime configure --runtime=docker +sudo systemctl restart docker +``` + +#### Start the container + +```shell +docker run -d --gpus=all -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama +``` + +> [!NOTE] +> If you're running on an NVIDIA JetPack system, Ollama can't automatically discover the correct JetPack version. Pass the environment variable JETSON_JETPACK=5 or JETSON_JETPACK=6 to the container to select version 5 or 6. + +### AMD GPU + +To run Ollama using Docker with AMD GPUs, use the `rocm` tag and the following command: + +```shell +docker run -d --device /dev/kfd --device /dev/dri -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama:rocm +``` + +### Run model locally + +Now you can run a model: + +```shell +docker exec -it ollama ollama run llama3.2 +``` + +### Try different models + +More models can be found on the [Ollama library](https://ollama.com/library). diff --git a/docs/examples.md b/docs/examples.md new file mode 100644 index 0000000..25f6563 --- /dev/null +++ b/docs/examples.md @@ -0,0 +1,20 @@ +# Examples + +This directory contains different examples of using Ollama. + +## Python examples +Ollama Python examples at [ollama-python/examples](https://github.com/ollama/ollama-python/tree/main/examples) + + +## JavaScript examples +Ollama JavaScript examples at [ollama-js/examples](https://github.com/ollama/ollama-js/tree/main/examples) + + +## OpenAI compatibility examples +Ollama OpenAI compatibility examples at [ollama/examples/openai](../docs/openai.md) + + +## Community examples + +- [LangChain Ollama Python](https://python.langchain.com/docs/integrations/chat/ollama/) +- [LangChain Ollama JS](https://js.langchain.com/docs/integrations/chat/ollama/) diff --git a/docs/faq.md b/docs/faq.md new file mode 100644 index 0000000..6fe6334 --- /dev/null +++ b/docs/faq.md @@ -0,0 +1,335 @@ +# FAQ + +## How can I upgrade Ollama? + +Ollama on macOS and Windows will automatically download updates. Click on the taskbar or menubar item and then click "Restart to update" to apply the update. Updates can also be installed by downloading the latest version [manually](https://ollama.com/download/). + +On Linux, re-run the install script: + +```shell +curl -fsSL https://ollama.com/install.sh | sh +``` + +## How can I view the logs? + +Review the [Troubleshooting](./troubleshooting.md) docs for more about using logs. + +## Is my GPU compatible with Ollama? + +Please refer to the [GPU docs](./gpu.md). + +## How can I specify the context window size? + +By default, Ollama uses a context window size of 4096 tokens. + +This can be overridden with the `OLLAMA_CONTEXT_LENGTH` environment variable. For example, to set the default context window to 8K, use: + +```shell +OLLAMA_CONTEXT_LENGTH=8192 ollama serve +``` + +To change this when using `ollama run`, use `/set parameter`: + +```shell +/set parameter num_ctx 4096 +``` + +When using the API, specify the `num_ctx` parameter: + +```shell +curl http://localhost:11434/api/generate -d '{ + "model": "llama3.2", + "prompt": "Why is the sky blue?", + "options": { + "num_ctx": 4096 + } +}' +``` + +## How can I tell if my model was loaded onto the GPU? + +Use the `ollama ps` command to see what models are currently loaded into memory. 
+ +```shell +ollama ps +``` + +> **Output**: +> +> ``` +> NAME ID SIZE PROCESSOR UNTIL +> llama3:70b bcfb190ca3a7 42 GB 100% GPU 4 minutes from now +> ``` + +The `Processor` column will show which memory the model was loaded in to: +* `100% GPU` means the model was loaded entirely into the GPU +* `100% CPU` means the model was loaded entirely in system memory +* `48%/52% CPU/GPU` means the model was loaded partially onto both the GPU and into system memory + +## How do I configure Ollama server? + +Ollama server can be configured with environment variables. + +### Setting environment variables on Mac + +If Ollama is run as a macOS application, environment variables should be set using `launchctl`: + +1. For each environment variable, call `launchctl setenv`. + + ```bash + launchctl setenv OLLAMA_HOST "0.0.0.0:11434" + ``` + +2. Restart Ollama application. + +### Setting environment variables on Linux + +If Ollama is run as a systemd service, environment variables should be set using `systemctl`: + +1. Edit the systemd service by calling `systemctl edit ollama.service`. This will open an editor. + +2. For each environment variable, add a line `Environment` under section `[Service]`: + + ```ini + [Service] + Environment="OLLAMA_HOST=0.0.0.0:11434" + ``` + +3. Save and exit. + +4. Reload `systemd` and restart Ollama: + + ```shell + systemctl daemon-reload + systemctl restart ollama + ``` + +### Setting environment variables on Windows + +On Windows, Ollama inherits your user and system environment variables. + +1. First Quit Ollama by clicking on it in the task bar. + +2. Start the Settings (Windows 11) or Control Panel (Windows 10) application and search for _environment variables_. + +3. Click on _Edit environment variables for your account_. + +4. Edit or create a new variable for your user account for `OLLAMA_HOST`, `OLLAMA_MODELS`, etc. + +5. Click OK/Apply to save. + +6. Start the Ollama application from the Windows Start menu. + +## How do I use Ollama behind a proxy? + +Ollama pulls models from the Internet and may require a proxy server to access the models. Use `HTTPS_PROXY` to redirect outbound requests through the proxy. Ensure the proxy certificate is installed as a system certificate. Refer to the section above for how to use environment variables on your platform. + +> [!NOTE] +> Avoid setting `HTTP_PROXY`. Ollama does not use HTTP for model pulls, only HTTPS. Setting `HTTP_PROXY` may interrupt client connections to the server. + +### How do I use Ollama behind a proxy in Docker? + +The Ollama Docker container image can be configured to use a proxy by passing `-e HTTPS_PROXY=https://proxy.example.com` when starting the container. + +Alternatively, the Docker daemon can be configured to use a proxy. Instructions are available for Docker Desktop on [macOS](https://docs.docker.com/desktop/settings/mac/#proxies), [Windows](https://docs.docker.com/desktop/settings/windows/#proxies), and [Linux](https://docs.docker.com/desktop/settings/linux/#proxies), and Docker [daemon with systemd](https://docs.docker.com/config/daemon/systemd/#httphttps-proxy). + +Ensure the certificate is installed as a system certificate when using HTTPS. This may require a new Docker image when using a self-signed certificate. + +```dockerfile +FROM ollama/ollama +COPY my-ca.pem /usr/local/share/ca-certificates/my-ca.crt +RUN update-ca-certificates +``` + +Build and run this image: + +```shell +docker build -t ollama-with-ca . 
docker run -d -e HTTPS_PROXY=https://my.proxy.example.com -p 11434:11434 ollama-with-ca
```

## Does Ollama send my prompts and answers back to ollama.com?

No. Ollama runs locally, and conversation data does not leave your machine.

## How can I expose Ollama on my network?

Ollama binds to 127.0.0.1 on port 11434 by default. Change the bind address with the `OLLAMA_HOST` environment variable.

Refer to the section [above](#how-do-i-configure-ollama-server) for how to set environment variables on your platform.

## How can I use Ollama with a proxy server?

Ollama runs an HTTP server and can be exposed using a proxy server such as Nginx. To do so, configure the proxy to forward requests and optionally set required headers (if not exposing Ollama on the network). For example, with Nginx:

```nginx
server {
    listen 80;
    server_name example.com;  # Replace with your domain or IP
    location / {
        proxy_pass http://localhost:11434;
        proxy_set_header Host localhost:11434;
    }
}
```

## How can I use Ollama with ngrok?

Ollama can be accessed through a range of tunneling tools. For example, with ngrok:

```shell
ngrok http 11434 --host-header="localhost:11434"
```

## How can I use Ollama with Cloudflare Tunnel?

To use Ollama with Cloudflare Tunnel, use the `--url` and `--http-host-header` flags:

```shell
cloudflared tunnel --url http://localhost:11434 --http-host-header="localhost:11434"
```

## How can I allow additional web origins to access Ollama?

Ollama allows cross-origin requests from `127.0.0.1` and `0.0.0.0` by default. Additional origins can be configured with `OLLAMA_ORIGINS`.

For browser extensions, you'll need to explicitly allow the extension's origin pattern. Set `OLLAMA_ORIGINS` to include `chrome-extension://*`, `moz-extension://*`, and `safari-web-extension://*` if you wish to allow all browser extensions access, or specific extensions as needed:

```
# Allow all Chrome, Firefox, and Safari extensions
OLLAMA_ORIGINS=chrome-extension://*,moz-extension://*,safari-web-extension://* ollama serve
```

Refer to the section [above](#how-do-i-configure-ollama-server) for how to set environment variables on your platform.

## Where are models stored?

- macOS: `~/.ollama/models`
- Linux: `/usr/share/ollama/.ollama/models`
- Windows: `C:\Users\%username%\.ollama\models`

### How do I set them to a different location?

If a different directory needs to be used, set the environment variable `OLLAMA_MODELS` to the chosen directory.

> Note: on Linux using the standard installer, the `ollama` user needs read and write access to the specified directory. To assign the directory to the `ollama` user, run `sudo chown -R ollama:ollama `.

Refer to the section [above](#how-do-i-configure-ollama-server) for how to set environment variables on your platform.

## How can I use Ollama in Visual Studio Code?

There is already a large collection of plugins available for VSCode, as well as other editors, that leverage Ollama. See the list of [extensions & plugins](https://github.com/ollama/ollama#extensions--plugins) at the bottom of the main repository readme.

## How do I use Ollama with GPU acceleration in Docker?

The Ollama Docker container can be configured with GPU acceleration in Linux or Windows (with WSL2). This requires the [nvidia-container-toolkit](https://github.com/NVIDIA/nvidia-container-toolkit). See [ollama/ollama](https://hub.docker.com/r/ollama/ollama) for more details.
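As a minimal sketch, assuming the NVIDIA Container Toolkit is installed and Docker has been configured to use the NVIDIA runtime (see the Docker image instructions above), the GPUs are exposed to the container with the `--gpus` flag:

```shell
# Start the Ollama container with access to all NVIDIA GPUs
docker run -d --gpus=all -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama
```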
GPU acceleration is not available for Docker Desktop on macOS due to the lack of GPU passthrough and emulation.

## Why is networking slow in WSL2 on Windows 10?

This can impact both installing Ollama and downloading models.

Open `Control Panel > Networking and Internet > View network status and tasks` and click on `Change adapter settings` on the left panel. Find the `vEthernet (WSL)` adapter, right-click it and select `Properties`.
Click on `Configure` and open the `Advanced` tab. Search through each of the properties until you find `Large Send Offload Version 2 (IPv4)` and `Large Send Offload Version 2 (IPv6)`. *Disable* both of these properties.

## How can I preload a model into Ollama to get faster response times?

If you are using the API, you can preload a model by sending the Ollama server an empty request. This works with both the `/api/generate` and `/api/chat` API endpoints.

To preload the mistral model using the generate endpoint, use:

```shell
curl http://localhost:11434/api/generate -d '{"model": "mistral"}'
```

To use the chat completions endpoint, use:

```shell
curl http://localhost:11434/api/chat -d '{"model": "mistral"}'
```

To preload a model using the CLI, use the command:

```shell
ollama run llama3.2 ""
```

## How do I keep a model loaded in memory or make it unload immediately?

By default, models are kept in memory for 5 minutes before being unloaded. This allows for quicker response times if you're making numerous requests to the LLM. If you want to immediately unload a model from memory, use the `ollama stop` command:

```shell
ollama stop llama3.2
```

If you're using the API, use the `keep_alive` parameter with the `/api/generate` and `/api/chat` endpoints to set the amount of time that a model stays in memory. The `keep_alive` parameter can be set to:
* a duration string (such as "10m" or "24h")
* a number in seconds (such as 3600)
* any negative number, which will keep the model loaded in memory (e.g. -1 or "-1m")
* '0', which will unload the model immediately after generating a response

For example, to preload a model and leave it in memory, use:

```shell
curl http://localhost:11434/api/generate -d '{"model": "llama3.2", "keep_alive": -1}'
```

To unload the model and free up memory, use:

```shell
curl http://localhost:11434/api/generate -d '{"model": "llama3.2", "keep_alive": 0}'
```

Alternatively, you can change the amount of time all models are kept in memory by setting the `OLLAMA_KEEP_ALIVE` environment variable when starting the Ollama server. The `OLLAMA_KEEP_ALIVE` variable accepts the same value types as the `keep_alive` parameter described above. Refer to the section explaining [how to configure the Ollama server](#how-do-i-configure-ollama-server) to correctly set the environment variable.

The `keep_alive` API parameter with the `/api/generate` and `/api/chat` API endpoints will override the `OLLAMA_KEEP_ALIVE` setting.

## How do I manage the maximum number of requests the Ollama server can queue?

If too many requests are sent to the server, it will respond with a 503 error indicating the server is overloaded. You can adjust how many requests may be queued by setting `OLLAMA_MAX_QUEUE`.

## How does Ollama handle concurrent requests?

Ollama supports two levels of concurrent processing. If your system has sufficient available memory (system memory when using CPU inference, or VRAM for GPU inference), then multiple models can be loaded at the same time.
For a given model, if there is sufficient available memory when the model is loaded, it is configured to allow parallel request processing. + +If there is insufficient available memory to load a new model request while one or more models are already loaded, all new requests will be queued until the new model can be loaded. As prior models become idle, one or more will be unloaded to make room for the new model. Queued requests will be processed in order. When using GPU inference new models must be able to completely fit in VRAM to allow concurrent model loads. + +Parallel request processing for a given model results in increasing the context size by the number of parallel requests. For example, a 2K context with 4 parallel requests will result in an 8K context and additional memory allocation. + +The following server settings may be used to adjust how Ollama handles concurrent requests on most platforms: + +- `OLLAMA_MAX_LOADED_MODELS` - The maximum number of models that can be loaded concurrently provided they fit in available memory. The default is 3 * the number of GPUs or 3 for CPU inference. +- `OLLAMA_NUM_PARALLEL` - The maximum number of parallel requests each model will process at the same time. The default will auto-select either 4 or 1 based on available memory. +- `OLLAMA_MAX_QUEUE` - The maximum number of requests Ollama will queue when busy before rejecting additional requests. The default is 512 + +Note: Windows with Radeon GPUs currently default to 1 model maximum due to limitations in ROCm v5.7 for available VRAM reporting. Once ROCm v6.2 is available, Windows Radeon will follow the defaults above. You may enable concurrent model loads on Radeon on Windows, but ensure you don't load more models than will fit into your GPUs VRAM. + +## How does Ollama load models on multiple GPUs? + +When loading a new model, Ollama evaluates the required VRAM for the model against what is currently available. If the model will entirely fit on any single GPU, Ollama will load the model on that GPU. This typically provides the best performance as it reduces the amount of data transferring across the PCI bus during inference. If the model does not fit entirely on one GPU, then it will be spread across all the available GPUs. + +## How can I enable Flash Attention? + +Flash Attention is a feature of most modern models that can significantly reduce memory usage as the context size grows. To enable Flash Attention, set the `OLLAMA_FLASH_ATTENTION` environment variable to `1` when starting the Ollama server. + +## How can I set the quantization type for the K/V cache? + +The K/V context cache can be quantized to significantly reduce memory usage when Flash Attention is enabled. + +To use quantized K/V cache with Ollama you can set the following environment variable: + +- `OLLAMA_KV_CACHE_TYPE` - The quantization type for the K/V cache. Default is `f16`. + +> Note: Currently this is a global option - meaning all models will run with the specified quantization type. + +The currently available K/V cache quantization types are: + +- `f16` - high precision and memory usage (default). +- `q8_0` - 8-bit quantization, uses approximately 1/2 the memory of `f16` with a very small loss in precision, this usually has no noticeable impact on the model's quality (recommended if not using f16). +- `q4_0` - 4-bit quantization, uses approximately 1/4 the memory of `f16` with a small-medium loss in precision that may be more noticeable at higher context sizes. 
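For example, a server started as follows (a sketch; choose the quantization type that fits your memory budget) enables Flash Attention and an 8-bit K/V cache for all models it loads:

```shell
# Enable Flash Attention and quantize the K/V cache to q8_0 for this server instance
OLLAMA_FLASH_ATTENTION=1 OLLAMA_KV_CACHE_TYPE=q8_0 ollama serve
```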
+ +How much the cache quantization impacts the model's response quality will depend on the model and the task. Models that have a high GQA count (e.g. Qwen2) may see a larger impact on precision from quantization than models with a low GQA count. + +You may need to experiment with different quantization types to find the best balance between memory usage and quality. diff --git a/docs/gpu.md b/docs/gpu.md new file mode 100644 index 0000000..b54c66a --- /dev/null +++ b/docs/gpu.md @@ -0,0 +1,119 @@ +# GPU +## Nvidia +Ollama supports Nvidia GPUs with compute capability 5.0+. + +Check your compute compatibility to see if your card is supported: +[https://developer.nvidia.com/cuda-gpus](https://developer.nvidia.com/cuda-gpus) + +| Compute Capability | Family | Cards | +| ------------------ | ------------------- | ----------------------------------------------------------------------------------------------------------- | +| 9.0 | NVIDIA | `H200` `H100` | +| 8.9 | GeForce RTX 40xx | `RTX 4090` `RTX 4080 SUPER` `RTX 4080` `RTX 4070 Ti SUPER` `RTX 4070 Ti` `RTX 4070 SUPER` `RTX 4070` `RTX 4060 Ti` `RTX 4060` | +| | NVIDIA Professional | `L4` `L40` `RTX 6000` | +| 8.6 | GeForce RTX 30xx | `RTX 3090 Ti` `RTX 3090` `RTX 3080 Ti` `RTX 3080` `RTX 3070 Ti` `RTX 3070` `RTX 3060 Ti` `RTX 3060` `RTX 3050 Ti` `RTX 3050` | +| | NVIDIA Professional | `A40` `RTX A6000` `RTX A5000` `RTX A4000` `RTX A3000` `RTX A2000` `A10` `A16` `A2` | +| 8.0 | NVIDIA | `A100` `A30` | +| 7.5 | GeForce GTX/RTX | `GTX 1650 Ti` `TITAN RTX` `RTX 2080 Ti` `RTX 2080` `RTX 2070` `RTX 2060` | +| | NVIDIA Professional | `T4` `RTX 5000` `RTX 4000` `RTX 3000` `T2000` `T1200` `T1000` `T600` `T500` | +| | Quadro | `RTX 8000` `RTX 6000` `RTX 5000` `RTX 4000` | +| 7.0 | NVIDIA | `TITAN V` `V100` `Quadro GV100` | +| 6.1 | NVIDIA TITAN | `TITAN Xp` `TITAN X` | +| | GeForce GTX | `GTX 1080 Ti` `GTX 1080` `GTX 1070 Ti` `GTX 1070` `GTX 1060` `GTX 1050 Ti` `GTX 1050` | +| | Quadro | `P6000` `P5200` `P4200` `P3200` `P5000` `P4000` `P3000` `P2200` `P2000` `P1000` `P620` `P600` `P500` `P520` | +| | Tesla | `P40` `P4` | +| 6.0 | NVIDIA | `Tesla P100` `Quadro GP100` | +| 5.2 | GeForce GTX | `GTX TITAN X` `GTX 980 Ti` `GTX 980` `GTX 970` `GTX 960` `GTX 950` | +| | Quadro | `M6000 24GB` `M6000` `M5000` `M5500M` `M4000` `M2200` `M2000` `M620` | +| | Tesla | `M60` `M40` | +| 5.0 | GeForce GTX | `GTX 750 Ti` `GTX 750` `NVS 810` | +| | Quadro | `K2200` `K1200` `K620` `M1200` `M520` `M5000M` `M4000M` `M3000M` `M2000M` `M1000M` `K620M` `M600M` `M500M` | + +For building locally to support older GPUs, see [developer.md](./development.md#linux-cuda-nvidia) + +### GPU Selection + +If you have multiple NVIDIA GPUs in your system and want to limit Ollama to use +a subset, you can set `CUDA_VISIBLE_DEVICES` to a comma separated list of GPUs. +Numeric IDs may be used, however ordering may vary, so UUIDs are more reliable. +You can discover the UUID of your GPUs by running `nvidia-smi -L` If you want to +ignore the GPUs and force CPU usage, use an invalid GPU ID (e.g., "-1") + +### Linux Suspend Resume + +On linux, after a suspend/resume cycle, sometimes Ollama will fail to discover +your NVIDIA GPU, and fallback to running on the CPU. 
You can workaround this +driver bug by reloading the NVIDIA UVM driver with `sudo rmmod nvidia_uvm && +sudo modprobe nvidia_uvm` + +## AMD Radeon +Ollama supports the following AMD GPUs: + +### Linux Support +| Family | Cards and accelerators | +| -------------- | ---------------------------------------------------------------------------------------------------------------------------------------------- | +| AMD Radeon RX | `7900 XTX` `7900 XT` `7900 GRE` `7800 XT` `7700 XT` `7600 XT` `7600` `6950 XT` `6900 XTX` `6900XT` `6800 XT` `6800` `Vega 64` `Vega 56` | +| AMD Radeon PRO | `W7900` `W7800` `W7700` `W7600` `W7500` `W6900X` `W6800X Duo` `W6800X` `W6800` `V620` `V420` `V340` `V320` `Vega II Duo` `Vega II` `VII` `SSG` | +| AMD Instinct | `MI300X` `MI300A` `MI300` `MI250X` `MI250` `MI210` `MI200` `MI100` `MI60` `MI50` | + +### Windows Support +With ROCm v6.1, the following GPUs are supported on Windows. + +| Family | Cards and accelerators | +| -------------- | ---------------------------------------------------------------------------------------------------------------------------------------------- | +| AMD Radeon RX | `7900 XTX` `7900 XT` `7900 GRE` `7800 XT` `7700 XT` `7600 XT` `7600` `6950 XT` `6900 XTX` `6900XT` `6800 XT` `6800` | +| AMD Radeon PRO | `W7900` `W7800` `W7700` `W7600` `W7500` `W6900X` `W6800X Duo` `W6800X` `W6800` `V620` | + + +### Overrides on Linux +Ollama leverages the AMD ROCm library, which does not support all AMD GPUs. In +some cases you can force the system to try to use a similar LLVM target that is +close. For example The Radeon RX 5400 is `gfx1034` (also known as 10.3.4) +however, ROCm does not currently support this target. The closest support is +`gfx1030`. You can use the environment variable `HSA_OVERRIDE_GFX_VERSION` with +`x.y.z` syntax. So for example, to force the system to run on the RX 5400, you +would set `HSA_OVERRIDE_GFX_VERSION="10.3.0"` as an environment variable for the +server. If you have an unsupported AMD GPU you can experiment using the list of +supported types below. + +If you have multiple GPUs with different GFX versions, append the numeric device +number to the environment variable to set them individually. For example, +`HSA_OVERRIDE_GFX_VERSION_0=10.3.0` and `HSA_OVERRIDE_GFX_VERSION_1=11.0.0` + +At this time, the known supported GPU types on linux are the following LLVM Targets. +This table shows some example GPUs that map to these LLVM targets: +| **LLVM Target** | **An Example GPU** | +|-----------------|---------------------| +| gfx900 | Radeon RX Vega 56 | +| gfx906 | Radeon Instinct MI50 | +| gfx908 | Radeon Instinct MI100 | +| gfx90a | Radeon Instinct MI210 | +| gfx940 | Radeon Instinct MI300 | +| gfx941 | | +| gfx942 | | +| gfx1030 | Radeon PRO V620 | +| gfx1100 | Radeon PRO W7900 | +| gfx1101 | Radeon PRO W7700 | +| gfx1102 | Radeon RX 7600 | + +AMD is working on enhancing ROCm v6 to broaden support for families of GPUs in a +future release which should increase support for more GPUs. + +Reach out on [Discord](https://discord.gg/ollama) or file an +[issue](https://github.com/ollama/ollama/issues) for additional help. + +### GPU Selection + +If you have multiple AMD GPUs in your system and want to limit Ollama to use a +subset, you can set `ROCR_VISIBLE_DEVICES` to a comma separated list of GPUs. +You can see the list of devices with `rocminfo`. If you want to ignore the GPUs +and force CPU usage, use an invalid GPU ID (e.g., "-1"). When available, use the +`Uuid` to uniquely identify the device instead of numeric value. 
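For example, to restrict Ollama to the first two devices listed by `rocminfo`, or to force CPU-only inference (a sketch; device IDs will differ on your system):

```shell
# Limit Ollama to GPUs 0 and 1 as enumerated by rocminfo
ROCR_VISIBLE_DEVICES=0,1 ollama serve

# Force CPU-only inference by passing an invalid device ID
ROCR_VISIBLE_DEVICES=-1 ollama serve
```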
+ +### Container Permission + +In some Linux distributions, SELinux can prevent containers from +accessing the AMD GPU devices. On the host system you can run +`sudo setsebool container_use_devices=1` to allow containers to use devices. + +### Metal (Apple GPUs) +Ollama supports GPU acceleration on Apple devices via the Metal API. diff --git a/docs/images/ollama-keys.png b/docs/images/ollama-keys.png new file mode 100644 index 0000000..942079a Binary files /dev/null and b/docs/images/ollama-keys.png differ diff --git a/docs/images/signup.png b/docs/images/signup.png new file mode 100644 index 0000000..e80bb4e Binary files /dev/null and b/docs/images/signup.png differ diff --git a/docs/import.md b/docs/import.md new file mode 100644 index 0000000..01fea54 --- /dev/null +++ b/docs/import.md @@ -0,0 +1,184 @@ +# Importing a model + +## Table of Contents + + * [Importing a Safetensors adapter](#Importing-a-fine-tuned-adapter-from-Safetensors-weights) + * [Importing a Safetensors model](#Importing-a-model-from-Safetensors-weights) + * [Importing a GGUF file](#Importing-a-GGUF-based-model-or-adapter) + * [Sharing models on ollama.com](#Sharing-your-model-on-ollamacom) + +## Importing a fine tuned adapter from Safetensors weights + +First, create a `Modelfile` with a `FROM` command pointing at the base model you used for fine tuning, and an `ADAPTER` command which points to the directory with your Safetensors adapter: + +```dockerfile +FROM +ADAPTER /path/to/safetensors/adapter/directory +``` + +Make sure that you use the same base model in the `FROM` command as you used to create the adapter otherwise you will get erratic results. Most frameworks use different quantization methods, so it's best to use non-quantized (i.e. non-QLoRA) adapters. If your adapter is in the same directory as your `Modelfile`, use `ADAPTER .` to specify the adapter path. + +Now run `ollama create` from the directory where the `Modelfile` was created: + +```shell +ollama create my-model +``` + +Lastly, test the model: + +```shell +ollama run my-model +``` + +Ollama supports importing adapters based on several different model architectures including: + + * Llama (including Llama 2, Llama 3, Llama 3.1, and Llama 3.2); + * Mistral (including Mistral 1, Mistral 2, and Mixtral); and + * Gemma (including Gemma 1 and Gemma 2) + +You can create the adapter using a fine tuning framework or tool which can output adapters in the Safetensors format, such as: + + * Hugging Face [fine tuning framework](https://huggingface.co/docs/transformers/en/training) + * [Unsloth](https://github.com/unslothai/unsloth) + * [MLX](https://github.com/ml-explore/mlx) + + +## Importing a model from Safetensors weights + +First, create a `Modelfile` with a `FROM` command which points to the directory containing your Safetensors weights: + +```dockerfile +FROM /path/to/safetensors/directory +``` + +If you create the Modelfile in the same directory as the weights, you can use the command `FROM .`. 
+ +Now run the `ollama create` command from the directory where you created the `Modelfile`: + +```shell +ollama create my-model +``` + +Lastly, test the model: + +```shell +ollama run my-model +``` + +Ollama supports importing models for several different architectures including: + + * Llama (including Llama 2, Llama 3, Llama 3.1, and Llama 3.2); + * Mistral (including Mistral 1, Mistral 2, and Mixtral); + * Gemma (including Gemma 1 and Gemma 2); and + * Phi3 + +This includes importing foundation models as well as any fine tuned models which have been _fused_ with a foundation model. +## Importing a GGUF based model or adapter + +If you have a GGUF based model or adapter it is possible to import it into Ollama. You can obtain a GGUF model or adapter by: + + * converting a Safetensors model with the `convert_hf_to_gguf.py` from Llama.cpp; + * converting a Safetensors adapter with the `convert_lora_to_gguf.py` from Llama.cpp; or + * downloading a model or adapter from a place such as HuggingFace + +To import a GGUF model, create a `Modelfile` containing: + +```dockerfile +FROM /path/to/file.gguf +``` + +For a GGUF adapter, create the `Modelfile` with: + +```dockerfile +FROM +ADAPTER /path/to/file.gguf +``` + +When importing a GGUF adapter, it's important to use the same base model as the base model that the adapter was created with. You can use: + + * a model from Ollama + * a GGUF file + * a Safetensors based model + +Once you have created your `Modelfile`, use the `ollama create` command to build the model. + +```shell +ollama create my-model +``` + +## Quantizing a Model + +Quantizing a model allows you to run models faster and with less memory consumption but at reduced accuracy. This allows you to run a model on more modest hardware. + +Ollama can quantize FP16 and FP32 based models into different quantization levels using the `-q/--quantize` flag with the `ollama create` command. + +First, create a Modelfile with the FP16 or FP32 based model you wish to quantize. + +```dockerfile +FROM /path/to/my/gemma/f16/model +``` + +Use `ollama create` to then create the quantized model. + +```shell +$ ollama create --quantize q4_K_M mymodel +transferring model data +quantizing F16 model to Q4_K_M +creating new layer sha256:735e246cc1abfd06e9cdcf95504d6789a6cd1ad7577108a70d9902fef503c1bd +creating new layer sha256:0853f0ad24e5865173bbf9ffcc7b0f5d56b66fd690ab1009867e45e7d2c4db0f +writing manifest +success +``` + +### Supported Quantizations + +- `q4_0` +- `q4_1` +- `q5_0` +- `q5_1` +- `q8_0` + +#### K-means Quantizations + +- `q3_K_S` +- `q3_K_M` +- `q3_K_L` +- `q4_K_S` +- `q4_K_M` +- `q5_K_S` +- `q5_K_M` +- `q6_K` + + +## Sharing your model on ollama.com + +You can share any model you have created by pushing it to [ollama.com](https://ollama.com) so that other users can try it out. + +First, use your browser to go to the [Ollama Sign-Up](https://ollama.com/signup) page. If you already have an account, you can skip this step. + +Sign-Up + +The `Username` field will be used as part of your model's name (e.g. `jmorganca/mymodel`), so make sure you are comfortable with the username that you have selected. + +Now that you have created an account and are signed-in, go to the [Ollama Keys Settings](https://ollama.com/settings/keys) page. + +Follow the directions on the page to determine where your Ollama Public Key is located. + +Ollama Keys + +Click on the `Add Ollama Public Key` button, and copy and paste the contents of your Ollama Public Key into the text field. 
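On most installs the public key is a short text file inside the `.ollama` directory; for example, on macOS or Linux it can usually be printed with the command below (the exact filename and path are assumptions and may vary by version and install method):

```shell
# Print the Ollama public key so it can be pasted into the form
# (typical location on macOS/Linux; adjust the path for your install)
cat ~/.ollama/id_ed25519.pub
```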
+ +To push a model to [ollama.com](https://ollama.com), first make sure that it is named correctly with your username. You may have to use the `ollama cp` command to copy +your model to give it the correct name. Once you're happy with your model's name, use the `ollama push` command to push it to [ollama.com](https://ollama.com). + +```shell +ollama cp mymodel myuser/mymodel +ollama push myuser/mymodel +``` + +Once your model has been pushed, other users can pull and run it by using the command: + +```shell +ollama run myuser/mymodel +``` + diff --git a/docs/linux.md b/docs/linux.md new file mode 100644 index 0000000..2dda87f --- /dev/null +++ b/docs/linux.md @@ -0,0 +1,194 @@ +# Linux + +## Install + +To install Ollama, run the following command: + +```shell +curl -fsSL https://ollama.com/install.sh | sh +``` + +## Manual install + +> [!NOTE] +> If you are upgrading from a prior version, you should remove the old libraries with `sudo rm -rf /usr/lib/ollama` first. + +Download and extract the package: + +```shell +curl -L https://ollama.com/download/ollama-linux-amd64.tgz -o ollama-linux-amd64.tgz +sudo tar -C /usr -xzf ollama-linux-amd64.tgz +``` + +Start Ollama: + +```shell +ollama serve +``` + +In another terminal, verify that Ollama is running: + +```shell +ollama -v +``` + +### AMD GPU install + +If you have an AMD GPU, also download and extract the additional ROCm package: + +```shell +curl -L https://ollama.com/download/ollama-linux-amd64-rocm.tgz -o ollama-linux-amd64-rocm.tgz +sudo tar -C /usr -xzf ollama-linux-amd64-rocm.tgz +``` + +### ARM64 install + +Download and extract the ARM64-specific package: + +```shell +curl -L https://ollama.com/download/ollama-linux-arm64.tgz -o ollama-linux-arm64.tgz +sudo tar -C /usr -xzf ollama-linux-arm64.tgz +``` + +### Adding Ollama as a startup service (recommended) + +Create a user and group for Ollama: + +```shell +sudo useradd -r -s /bin/false -U -m -d /usr/share/ollama ollama +sudo usermod -a -G ollama $(whoami) +``` + +Create a service file in `/etc/systemd/system/ollama.service`: + +```ini +[Unit] +Description=Ollama Service +After=network-online.target + +[Service] +ExecStart=/usr/bin/ollama serve +User=ollama +Group=ollama +Restart=always +RestartSec=3 +Environment="PATH=$PATH" + +[Install] +WantedBy=multi-user.target +``` + +Then start the service: + +```shell +sudo systemctl daemon-reload +sudo systemctl enable ollama +``` + +### Install CUDA drivers (optional) + +[Download and install](https://developer.nvidia.com/cuda-downloads) CUDA. + +Verify that the drivers are installed by running the following command, which should print details about your GPU: + +```shell +nvidia-smi +``` + +### Install AMD ROCm drivers (optional) + +[Download and Install](https://rocm.docs.amd.com/projects/install-on-linux/en/latest/tutorial/quick-start.html) ROCm v6. + +### Start Ollama + +Start Ollama and verify it is running: + +```shell +sudo systemctl start ollama +sudo systemctl status ollama +``` + +> [!NOTE] +> While AMD has contributed the `amdgpu` driver upstream to the official linux +> kernel source, the version is older and may not support all ROCm features. We +> recommend you install the latest driver from +> https://www.amd.com/en/support/linux-drivers for best support of your Radeon +> GPU. 
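Once the service is active, a quick way to confirm the server is reachable is to query its version endpoint (a sketch, assuming the default bind address of `127.0.0.1:11434`):

```shell
# Should return the running Ollama version as JSON
curl http://127.0.0.1:11434/api/version
```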
+ +## Customizing + +To customize the installation of Ollama, you can edit the systemd service file or the environment variables by running: + +```shell +sudo systemctl edit ollama +``` + +Alternatively, create an override file manually in `/etc/systemd/system/ollama.service.d/override.conf`: + +```ini +[Service] +Environment="OLLAMA_DEBUG=1" +``` + +## Updating + +Update Ollama by running the install script again: + +```shell +curl -fsSL https://ollama.com/install.sh | sh +``` + +Or by re-downloading Ollama: + +```shell +curl -L https://ollama.com/download/ollama-linux-amd64.tgz -o ollama-linux-amd64.tgz +sudo tar -C /usr -xzf ollama-linux-amd64.tgz +``` + +## Installing specific versions + +Use `OLLAMA_VERSION` environment variable with the install script to install a specific version of Ollama, including pre-releases. You can find the version numbers in the [releases page](https://github.com/ollama/ollama/releases). + +For example: + +```shell +curl -fsSL https://ollama.com/install.sh | OLLAMA_VERSION=0.5.7 sh +``` + +## Viewing logs + +To view logs of Ollama running as a startup service, run: + +```shell +journalctl -e -u ollama +``` + +## Uninstall + +Remove the ollama service: + +```shell +sudo systemctl stop ollama +sudo systemctl disable ollama +sudo rm /etc/systemd/system/ollama.service +``` + +Remove the ollama binary from your bin directory (either `/usr/local/bin`, `/usr/bin`, or `/bin`): + +```shell +sudo rm $(which ollama) +``` + +Remove the downloaded models and Ollama service user and group: + +```shell +sudo rm -r /usr/share/ollama +sudo userdel ollama +sudo groupdel ollama +``` + +Remove installed libraries: + +```shell +sudo rm -rf /usr/local/lib/ollama +``` diff --git a/docs/modelfile.md b/docs/modelfile.md new file mode 100644 index 0000000..6513873 --- /dev/null +++ b/docs/modelfile.md @@ -0,0 +1,258 @@ +# Ollama Model File + +> [!NOTE] +> `Modelfile` syntax is in development + +A model file is the blueprint to create and share models with Ollama. + +## Table of Contents + +- [Format](#format) +- [Examples](#examples) +- [Instructions](#instructions) + - [FROM (Required)](#from-required) + - [Build from existing model](#build-from-existing-model) + - [Build from a Safetensors model](#build-from-a-safetensors-model) + - [Build from a GGUF file](#build-from-a-gguf-file) + - [PARAMETER](#parameter) + - [Valid Parameters and Values](#valid-parameters-and-values) + - [TEMPLATE](#template) + - [Template Variables](#template-variables) + - [SYSTEM](#system) + - [ADAPTER](#adapter) + - [LICENSE](#license) + - [MESSAGE](#message) +- [Notes](#notes) + +## Format + +The format of the `Modelfile`: + +``` +# comment +INSTRUCTION arguments +``` + +| Instruction | Description | +| ----------------------------------- | -------------------------------------------------------------- | +| [`FROM`](#from-required) (required) | Defines the base model to use. | +| [`PARAMETER`](#parameter) | Sets the parameters for how Ollama will run the model. | +| [`TEMPLATE`](#template) | The full prompt template to be sent to the model. | +| [`SYSTEM`](#system) | Specifies the system message that will be set in the template. | +| [`ADAPTER`](#adapter) | Defines the (Q)LoRA adapters to apply to the model. | +| [`LICENSE`](#license) | Specifies the legal license. | +| [`MESSAGE`](#message) | Specify message history. 
| + +## Examples + +### Basic `Modelfile` + +An example of a `Modelfile` creating a mario blueprint: + +``` +FROM llama3.2 +# sets the temperature to 1 [higher is more creative, lower is more coherent] +PARAMETER temperature 1 +# sets the context window size to 4096, this controls how many tokens the LLM can use as context to generate the next token +PARAMETER num_ctx 4096 + +# sets a custom system message to specify the behavior of the chat assistant +SYSTEM You are Mario from super mario bros, acting as an assistant. +``` + +To use this: + +1. Save it as a file (e.g. `Modelfile`) +2. `ollama create choose-a-model-name -f ` +3. `ollama run choose-a-model-name` +4. Start using the model! + +To view the Modelfile of a given model, use the `ollama show --modelfile` command. + +```shell +ollama show --modelfile llama3.2 +``` + +> **Output**: +> +> ``` +> # Modelfile generated by "ollama show" +> # To build a new Modelfile based on this one, replace the FROM line with: +> # FROM llama3.2:latest +> FROM /Users/pdevine/.ollama/models/blobs/sha256-00e1317cbf74d901080d7100f57580ba8dd8de57203072dc6f668324ba545f29 +> TEMPLATE """{{ if .System }}<|start_header_id|>system<|end_header_id|> +> +> {{ .System }}<|eot_id|>{{ end }}{{ if .Prompt }}<|start_header_id|>user<|end_header_id|> +> +> {{ .Prompt }}<|eot_id|>{{ end }}<|start_header_id|>assistant<|end_header_id|> +> +> {{ .Response }}<|eot_id|>""" +> PARAMETER stop "<|start_header_id|>" +> PARAMETER stop "<|end_header_id|>" +> PARAMETER stop "<|eot_id|>" +> PARAMETER stop "<|reserved_special_token" +> ``` + + +## Instructions + +### FROM (Required) + +The `FROM` instruction defines the base model to use when creating a model. + +``` +FROM : +``` + +#### Build from existing model + +``` +FROM llama3.2 +``` + +A list of available base models: + +Additional models can be found at: + + +#### Build from a Safetensors model + +``` +FROM +``` + +The model directory should contain the Safetensors weights for a supported architecture. + +Currently supported model architectures: + * Llama (including Llama 2, Llama 3, Llama 3.1, and Llama 3.2) + * Mistral (including Mistral 1, Mistral 2, and Mixtral) + * Gemma (including Gemma 1 and Gemma 2) + * Phi3 + +#### Build from a GGUF file + +``` +FROM ./ollama-model.gguf +``` + +The GGUF file location should be specified as an absolute path or relative to the `Modelfile` location. + + +### PARAMETER + +The `PARAMETER` instruction defines a parameter that can be set when the model is run. + +``` +PARAMETER +``` + +#### Valid Parameters and Values + +| Parameter | Description | Value Type | Example Usage | +| -------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------- | -------------------- | +| num_ctx | Sets the size of the context window used to generate the next token. (Default: 2048) | int | num_ctx 4096 | +| repeat_last_n | Sets how far back for the model to look back to prevent repetition. (Default: 64, 0 = disabled, -1 = num_ctx) | int | repeat_last_n 64 | +| repeat_penalty | Sets how strongly to penalize repetitions. A higher value (e.g., 1.5) will penalize repetitions more strongly, while a lower value (e.g., 0.9) will be more lenient. (Default: 1.1) | float | repeat_penalty 1.1 | +| temperature | The temperature of the model. Increasing the temperature will make the model answer more creatively. 
(Default: 0.8) | float | temperature 0.7 | +| seed | Sets the random number seed to use for generation. Setting this to a specific number will make the model generate the same text for the same prompt. (Default: 0) | int | seed 42 | +| stop | Sets the stop sequences to use. When this pattern is encountered the LLM will stop generating text and return. Multiple stop patterns may be set by specifying multiple separate `stop` parameters in a modelfile. | string | stop "AI assistant:" | +| num_predict | Maximum number of tokens to predict when generating text. (Default: -1, infinite generation) | int | num_predict 42 | +| top_k | Reduces the probability of generating nonsense. A higher value (e.g. 100) will give more diverse answers, while a lower value (e.g. 10) will be more conservative. (Default: 40) | int | top_k 40 | +| top_p | Works together with top-k. A higher value (e.g., 0.95) will lead to more diverse text, while a lower value (e.g., 0.5) will generate more focused and conservative text. (Default: 0.9) | float | top_p 0.9 | +| min_p | Alternative to the top_p, and aims to ensure a balance of quality and variety. The parameter *p* represents the minimum probability for a token to be considered, relative to the probability of the most likely token. For example, with *p*=0.05 and the most likely token having a probability of 0.9, logits with a value less than 0.045 are filtered out. (Default: 0.0) | float | min_p 0.05 | + +### TEMPLATE + +`TEMPLATE` of the full prompt template to be passed into the model. It may include (optionally) a system message, a user's message and the response from the model. Note: syntax may be model specific. Templates use Go [template syntax](https://pkg.go.dev/text/template). + +#### Template Variables + +| Variable | Description | +| ----------------- | --------------------------------------------------------------------------------------------- | +| `{{ .System }}` | The system message used to specify custom behavior. | +| `{{ .Prompt }}` | The user prompt message. | +| `{{ .Response }}` | The response from the model. When generating a response, text after this variable is omitted. | + +``` +TEMPLATE """{{ if .System }}<|im_start|>system +{{ .System }}<|im_end|> +{{ end }}{{ if .Prompt }}<|im_start|>user +{{ .Prompt }}<|im_end|> +{{ end }}<|im_start|>assistant +""" +``` + +### SYSTEM + +The `SYSTEM` instruction specifies the system message to be used in the template, if applicable. + +``` +SYSTEM """""" +``` + +### ADAPTER + +The `ADAPTER` instruction specifies a fine tuned LoRA adapter that should apply to the base model. The value of the adapter should be an absolute path or a path relative to the Modelfile. The base model should be specified with a `FROM` instruction. If the base model is not the same as the base model that the adapter was tuned from the behaviour will be erratic. + +#### Safetensor adapter + +``` +ADAPTER +``` + +Currently supported Safetensor adapters: + * Llama (including Llama 2, Llama 3, and Llama 3.1) + * Mistral (including Mistral 1, Mistral 2, and Mixtral) + * Gemma (including Gemma 1 and Gemma 2) + +#### GGUF adapter + +``` +ADAPTER ./ollama-lora.gguf +``` + +### LICENSE + +The `LICENSE` instruction allows you to specify the legal license under which the model used with this Modelfile is shared or distributed. + +``` +LICENSE """ + +""" +``` + +### MESSAGE + +The `MESSAGE` instruction allows you to specify a message history for the model to use when responding. 
Use multiple iterations of the MESSAGE command to build up a conversation which will guide the model to answer in a similar way. + +``` +MESSAGE +``` + +#### Valid roles + +| Role | Description | +| --------- | ------------------------------------------------------------ | +| system | Alternate way of providing the SYSTEM message for the model. | +| user | An example message of what the user could have asked. | +| assistant | An example message of how the model should respond. | + + +#### Example conversation + +``` +MESSAGE user Is Toronto in Canada? +MESSAGE assistant yes +MESSAGE user Is Sacramento in Canada? +MESSAGE assistant no +MESSAGE user Is Ontario in Canada? +MESSAGE assistant yes +``` + + +## Notes + +- the **`Modelfile` is not case sensitive**. In the examples, uppercase instructions are used to make it easier to distinguish it from arguments. +- Instructions can be in any order. In the examples, the `FROM` instruction is first to keep it easily readable. + +[1]: https://ollama.com/library diff --git a/docs/openai.md b/docs/openai.md new file mode 100644 index 0000000..d0bac4c --- /dev/null +++ b/docs/openai.md @@ -0,0 +1,368 @@ +# OpenAI compatibility + +> [!NOTE] +> OpenAI compatibility is experimental and is subject to major adjustments including breaking changes. For fully-featured access to the Ollama API, see the Ollama [Python library](https://github.com/ollama/ollama-python), [JavaScript library](https://github.com/ollama/ollama-js) and [REST API](https://github.com/ollama/ollama/blob/main/docs/api.md). + +Ollama provides experimental compatibility with parts of the [OpenAI API](https://platform.openai.com/docs/api-reference) to help connect existing applications to Ollama. + +## Usage + +### OpenAI Python library + +```python +from openai import OpenAI + +client = OpenAI( + base_url='http://localhost:11434/v1/', + + # required but ignored + api_key='ollama', +) + +chat_completion = client.chat.completions.create( + messages=[ + { + 'role': 'user', + 'content': 'Say this is a test', + } + ], + model='llama3.2', +) + +response = client.chat.completions.create( + model="llava", + messages=[ + { + "role": "user", + "content": [ + {"type": "text", "text": "What's in this image?"}, + { + "type": "image_url", + "image_url": 
"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAG0AAABmCAYAAADBPx+VAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAA3VSURBVHgB7Z27r0zdG8fX743i1bi1ikMoFMQloXRpKFFIqI7LH4BEQ+NWIkjQuSWCRIEoULk0gsK1kCBI0IhrQVT7tz/7zZo888yz1r7MnDl7z5xvsjkzs2fP3uu71nNfa7lkAsm7d++Sffv2JbNmzUqcc8m0adOSzZs3Z+/XES4ZckAWJEGWPiCxjsQNLWmQsWjRIpMseaxcuTKpG/7HP27I8P79e7dq1ars/yL4/v27S0ejqwv+cUOGEGGpKHR37tzJCEpHV9tnT58+dXXCJDdECBE2Ojrqjh071hpNECjx4cMHVycM1Uhbv359B2F79+51586daxN/+pyRkRFXKyRDAqxEp4yMlDDzXG1NPnnyJKkThoK0VFd1ELZu3TrzXKxKfW7dMBQ6bcuWLW2v0VlHjx41z717927ba22U9APcw7Nnz1oGEPeL3m3p2mTAYYnFmMOMXybPPXv2bNIPpFZr1NHn4HMw0KRBjg9NuRw95s8PEcz/6DZELQd/09C9QGq5RsmSRybqkwHGjh07OsJSsYYm3ijPpyHzoiacg35MLdDSIS/O1yM778jOTwYUkKNHWUzUWaOsylE00MyI0fcnOwIdjvtNdW/HZwNLGg+sR1kMepSNJXmIwxBZiG8tDTpEZzKg0GItNsosY8USkxDhD0Rinuiko2gfL/RbiD2LZAjU9zKQJj8RDR0vJBR1/Phx9+PHj9Z7REF4nTZkxzX4LCXHrV271qXkBAPGfP/atWvu/PnzHe4C97F48eIsRLZ9+3a3f/9+87dwP1JxaF7/3r17ba+5l4EcaVo0lj3SBq5kGTJSQmLWMjgYNei2GPT1MuMqGTDEFHzeQSP2wi/jGnkmPJ/nhccs44jvDAxpVcxnq0F6eT8h4ni/iIWpR5lPyA6ETkNXoSukvpJAD3AsXLiwpZs49+fPn5ke4j10TqYvegSfn0OnafC+Tv9ooA/JPkgQysqQNBzagXY55nO/oa1F7qvIPWkRL12WRpMWUvpVDYmxAPehxWSe8ZEXL20sadYIozfmNch4QJPAfeJgW3rNsnzphBKNJM2KKODo1rVOMRYik5ETy3ix4qWNI81qAAirizgMIc+yhTytx0JWZuNI03qsrgWlGtwjoS9XwgUhWGyhUaRZZQNNIEwCiXD16tXcAHUs79co0vSD8rrJCIW98pzvxpAWyyo3HYwqS0+H0BjStClcZJT5coMm6D2LOF8TolGJtK9fvyZpyiC5ePFi9nc/oJU4eiEP0jVoAnHa9wyJycITMP78+eMeP37sXrx44d6+fdt6f82aNdkx1pg9e3Zb5W+RSRE+n+VjksQWifvVaTKFhn5O8my63K8Qabdv33b379/PiAP//vuvW7BggZszZ072/+TJk91YgkafPn166zXB1rQHFvouAWHq9z3SEevSUerqCn2/dDCeta2jxYbr69evk4MHDyY7d+7MjhMnTiTPnz9Pfv/+nfQT2ggpO2dMF8cghuoM7Ygj5iWCqRlGFml0QC/ftGmTmzt3rmsaKDsgBSPh0/8yPeLLBihLkOKJc0jp8H8vUzcxIA1k6QJ/c78tWEyj5P3o4u9+jywNPdJi5rAH9x0KHcl4Hg570eQp3+vHXGyrmEeigzQsQsjavXt38ujRo44LQuDDhw+TW7duRS1HGgMxhNXHgflaNTOsHyKvHK5Ijo2jbFjJBQK9YwFd6RVMzfgRBmEfP37suBBm/p49e1qjEP2mwTViNRo0VJWH1deMXcNK08uUjVUu7s/zRaL+oLNxz1bpANco4npUgX4G2eFbpDFyQoQxojBCpEGSytmOH8qrH5Q9vuzD6ofQylkCUmh8DBAr+q8JCyVNtWQIidKQE9wNtLSQnS4jDSsxNHogzFuQBw4cyM61UKVsjfr3ooBkPSqqQHesUPWVtzi9/vQi1T+rJj7WiTz4Pt/l3LxUkr5P2VYZaZ4URpsE+st/dujQoaBBYokbrz/8TJNQYLSonrPS9kUaSkPeZyj1AWSj+d+VBoy1pIWVNed8P0Ll/ee5HdGRhrHhR5GGN0r4LGZBaj8oFDJitBTJzIZgFcmU0Y8ytWMZMzJOaXUSrUs5RxKnrxmbb5YXO9VGUhtpXldhEUogFr3IzIsvlpmdosVcGVGXFWp2oU9kLFL3dEkSz6NHEY1sjSRdIuDFWEhd8KxFqsRi1uM/nz9/zpxnwlESONdg6dKlbsaMGS4EHFHtjFIDHwKOo46l4TxSuxgDzi+rE2jg+BaFruOX4HXa0Nnf1lwAPufZeF8/r6zD97WK2qFnGjBxTw5qNGPxT+5T/r7/7RawFC3j4vTp09koCxkeHjqbHJqArmH5UrFKKksnxrK7FuRIs8STfBZv+luugXZ2pR/pP9Ois4z+TiMzUUkUjD0iEi1fzX8GmXyuxUBRcaUfykV0YZnlJGKQpOiGB76x5GeWkWWJc3mOrK6S7xdND+W5N6XyaRgtWJFe13GkaZnKOsYqGdOVVVbGupsyA/l7emTLHi7vwTdirNEt0qxnzAvBFcnQF16xh/TMpUuXHDowhlA9vQVraQhkudRdzOnK+04ZSP3DUhVSP61YsaLtd/ks7ZgtPcXqPqEafHkdqa84X6aCeL7YWlv6edGFHb+ZFICPlljHhg0bKuk0CSvVznWsotRu433alNdFrqG45ejoaPCaUkWERpLXjzFL2Rpllp7PJU2a/v7Ab8N05/9t27Z16KUqoFGsxnI9EosS2niSYg9SpU6B4JgTrvVW1flt1sT+0ADIJU2maXzcUTraGCRaL1Wp9rUMk16PMom8QhruxzvZIegJjFU7LLCePfS8uaQdPny4jTTL0dbee5mYokQsXTIWNY46kuMbnt8Kmec+LGWtOVIl9cT1rCB0V8WqkjAsRwta93TbwNYoGKsUSChN44lgBNCoHLHzquYKrU6qZ8lolCIN0Rh6cP0Q3U6I6IXILYOQI513hJaSKAorFpuHXJNfVlpRtmYBk1Su1obZr5dnKAO+L10Hrj3WZW+E3qh6IszE37F6EB+68mGpvKm4eb9bFrlzrok7fvr0Kfv727dvWRmdVTJHw0qiiCUSZ6wCK+7XL/AcsgNyL74DQQ730sv78Su7+t/A36MdY0sW5o40ahslXr58aZ5HtZB8GH64m9EmMZ7FpYw4T6QnrZfgenrhFxaSiSGXtPnz57e9TkNZLvTjeqhr734CNtrK41L40sUQckmj1lGKQ0rC37x544r8eNXRpnVE3ZZY7zXo8NomiO0ZUCj2uHz58rbXoZ6gc0uA+F6ZeKS/jhRDUq8MKrTho9fEkihMmhxtBI1DxKFY9XLpVcSkfoi8JGnToZO5sU5aiDQIW716ddt7ZLYtMQlhECdBGXZZMWldY5BHm5xgAroWj4C0hbYkSc/jBmggIrXJWlZM6pSETsEPGqZO
ndr2uuuR5rF169a2HoHPdurUKZM4CO1WTPqaDaAd+GFGKdIQkxAn9RuEWcTRyN2KSUgiSgF5aWzPTeA/lN5rZubMmR2bE4SIC4nJoltgAV/dVefZm72AtctUCJU2CMJ327hxY9t7EHbkyJFseq+EJSY16RPo3Dkq1kkr7+q0bNmyDuLQcZBEPYmHVdOBiJyIlrRDq41YPWfXOxUysi5fvtyaj+2BpcnsUV/oSoEMOk2CQGlr4ckhBwaetBhjCwH0ZHtJROPJkyc7UjcYLDjmrH7ADTEBXFfOYmB0k9oYBOjJ8b4aOYSe7QkKcYhFlq3QYLQhSidNmtS2RATwy8YOM3EQJsUjKiaWZ+vZToUQgzhkHXudb/PW5YMHD9yZM2faPsMwoc7RciYJXbGuBqJ1UIGKKLv915jsvgtJxCZDubdXr165mzdvtr1Hz5LONA8jrUwKPqsmVesKa49S3Q4WxmRPUEYdTjgiUcfUwLx589ySJUva3oMkP6IYddq6HMS4o55xBJBUeRjzfa4Zdeg56QZ43LhxoyPo7Lf1kNt7oO8wWAbNwaYjIv5lhyS7kRf96dvm5Jah8vfvX3flyhX35cuX6HfzFHOToS1H4BenCaHvO8pr8iDuwoUL7tevX+b5ZdbBair0xkFIlFDlW4ZknEClsp/TzXyAKVOmmHWFVSbDNw1l1+4f90U6IY/q4V27dpnE9bJ+v87QEydjqx/UamVVPRG+mwkNTYN+9tjkwzEx+atCm/X9WvWtDtAb68Wy9LXa1UmvCDDIpPkyOQ5ZwSzJ4jMrvFcr0rSjOUh+GcT4LSg5ugkW1Io0/SCDQBojh0hPlaJdah+tkVYrnTZowP8iq1F1TgMBBauufyB33x1v+NWFYmT5KmppgHC+NkAgbmRkpD3yn9QIseXymoTQFGQmIOKTxiZIWpvAatenVqRVXf2nTrAWMsPnKrMZHz6bJq5jvce6QK8J1cQNgKxlJapMPdZSR64/UivS9NztpkVEdKcrs5alhhWP9NeqlfWopzhZScI6QxseegZRGeg5a8C3Re1Mfl1ScP36ddcUaMuv24iOJtz7sbUjTS4qBvKmstYJoUauiuD3k5qhyr7QdUHMeCgLa1Ear9NquemdXgmum4fvJ6w1lqsuDhNrg1qSpleJK7K3TF0Q2jSd94uSZ60kK1e3qyVpQK6PVWXp2/FC3mp6jBhKKOiY2h3gtUV64TWM6wDETRPLDfSakXmH3w8g9Jlug8ZtTt4kVF0kLUYYmCCtD/DrQ5YhMGbA9L3ucdjh0y8kOHW5gU/VEEmJTcL4Pz/f7mgoAbYkAAAAAElFTkSuQmCC", + }, + ], + } + ], + max_tokens=300, +) + +completion = client.completions.create( + model="llama3.2", + prompt="Say this is a test", +) + +list_completion = client.models.list() + +model = client.models.retrieve("llama3.2") + +embeddings = client.embeddings.create( + model="all-minilm", + input=["why is the sky blue?", "why is the grass green?"], +) +``` + +#### Structured outputs + +```python +from pydantic import BaseModel +from openai import OpenAI + +client = OpenAI(base_url="http://localhost:11434/v1", api_key="ollama") + +# Define the schema for the response +class FriendInfo(BaseModel): + name: str + age: int + is_available: bool + +class FriendList(BaseModel): + friends: list[FriendInfo] + +try: + completion = client.beta.chat.completions.parse( + temperature=0, + model="llama3.1:8b", + messages=[ + {"role": "user", "content": "I have two friends. The first is Ollama 22 years old busy saving the world, and the second is Alonso 23 years old and wants to hang out. Return a list of friends in JSON format"} + ], + response_format=FriendList, + ) + + friends_response = completion.choices[0].message + if friends_response.parsed: + print(friends_response.parsed) + elif friends_response.refusal: + print(friends_response.refusal) +except Exception as e: + print(f"Error: {e}") +``` + +### OpenAI JavaScript library + +```javascript +import OpenAI from 'openai' + +const openai = new OpenAI({ + baseURL: 'http://localhost:11434/v1/', + + // required but ignored + apiKey: 'ollama', +}) + +const chatCompletion = await openai.chat.completions.create({ + messages: [{ role: 'user', content: 'Say this is a test' }], + model: 'llama3.2', +}) + +const response = await openai.chat.completions.create({ + model: "llava", + messages: [ + { + role: "user", + content: [ + { type: "text", text: "What's in this image?" 
}, + { + type: "image_url", + image_url: "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAG0AAABmCAYAAADBPx+VAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAA3VSURBVHgB7Z27r0zdG8fX743i1bi1ikMoFMQloXRpKFFIqI7LH4BEQ+NWIkjQuSWCRIEoULk0gsK1kCBI0IhrQVT7tz/7zZo888yz1r7MnDl7z5xvsjkzs2fP3uu71nNfa7lkAsm7d++Sffv2JbNmzUqcc8m0adOSzZs3Z+/XES4ZckAWJEGWPiCxjsQNLWmQsWjRIpMseaxcuTKpG/7HP27I8P79e7dq1ars/yL4/v27S0ejqwv+cUOGEGGpKHR37tzJCEpHV9tnT58+dXXCJDdECBE2Ojrqjh071hpNECjx4cMHVycM1Uhbv359B2F79+51586daxN/+pyRkRFXKyRDAqxEp4yMlDDzXG1NPnnyJKkThoK0VFd1ELZu3TrzXKxKfW7dMBQ6bcuWLW2v0VlHjx41z717927ba22U9APcw7Nnz1oGEPeL3m3p2mTAYYnFmMOMXybPPXv2bNIPpFZr1NHn4HMw0KRBjg9NuRw95s8PEcz/6DZELQd/09C9QGq5RsmSRybqkwHGjh07OsJSsYYm3ijPpyHzoiacg35MLdDSIS/O1yM778jOTwYUkKNHWUzUWaOsylE00MyI0fcnOwIdjvtNdW/HZwNLGg+sR1kMepSNJXmIwxBZiG8tDTpEZzKg0GItNsosY8USkxDhD0Rinuiko2gfL/RbiD2LZAjU9zKQJj8RDR0vJBR1/Phx9+PHj9Z7REF4nTZkxzX4LCXHrV271qXkBAPGfP/atWvu/PnzHe4C97F48eIsRLZ9+3a3f/9+87dwP1JxaF7/3r17ba+5l4EcaVo0lj3SBq5kGTJSQmLWMjgYNei2GPT1MuMqGTDEFHzeQSP2wi/jGnkmPJ/nhccs44jvDAxpVcxnq0F6eT8h4ni/iIWpR5lPyA6ETkNXoSukvpJAD3AsXLiwpZs49+fPn5ke4j10TqYvegSfn0OnafC+Tv9ooA/JPkgQysqQNBzagXY55nO/oa1F7qvIPWkRL12WRpMWUvpVDYmxAPehxWSe8ZEXL20sadYIozfmNch4QJPAfeJgW3rNsnzphBKNJM2KKODo1rVOMRYik5ETy3ix4qWNI81qAAirizgMIc+yhTytx0JWZuNI03qsrgWlGtwjoS9XwgUhWGyhUaRZZQNNIEwCiXD16tXcAHUs79co0vSD8rrJCIW98pzvxpAWyyo3HYwqS0+H0BjStClcZJT5coMm6D2LOF8TolGJtK9fvyZpyiC5ePFi9nc/oJU4eiEP0jVoAnHa9wyJycITMP78+eMeP37sXrx44d6+fdt6f82aNdkx1pg9e3Zb5W+RSRE+n+VjksQWifvVaTKFhn5O8my63K8Qabdv33b379/PiAP//vuvW7BggZszZ072/+TJk91YgkafPn166zXB1rQHFvouAWHq9z3SEevSUerqCn2/dDCeta2jxYbr69evk4MHDyY7d+7MjhMnTiTPnz9Pfv/+nfQT2ggpO2dMF8cghuoM7Ygj5iWCqRlGFml0QC/ftGmTmzt3rmsaKDsgBSPh0/8yPeLLBihLkOKJc0jp8H8vUzcxIA1k6QJ/c78tWEyj5P3o4u9+jywNPdJi5rAH9x0KHcl4Hg570eQp3+vHXGyrmEeigzQsQsjavXt38ujRo44LQuDDhw+TW7duRS1HGgMxhNXHgflaNTOsHyKvHK5Ijo2jbFjJBQK9YwFd6RVMzfgRBmEfP37suBBm/p49e1qjEP2mwTViNRo0VJWH1deMXcNK08uUjVUu7s/zRaL+oLNxz1bpANco4npUgX4G2eFbpDFyQoQxojBCpEGSytmOH8qrH5Q9vuzD6ofQylkCUmh8DBAr+q8JCyVNtWQIidKQE9wNtLSQnS4jDSsxNHogzFuQBw4cyM61UKVsjfr3ooBkPSqqQHesUPWVtzi9/vQi1T+rJj7WiTz4Pt/l3LxUkr5P2VYZaZ4URpsE+st/dujQoaBBYokbrz/8TJNQYLSonrPS9kUaSkPeZyj1AWSj+d+VBoy1pIWVNed8P0Ll/ee5HdGRhrHhR5GGN0r4LGZBaj8oFDJitBTJzIZgFcmU0Y8ytWMZMzJOaXUSrUs5RxKnrxmbb5YXO9VGUhtpXldhEUogFr3IzIsvlpmdosVcGVGXFWp2oU9kLFL3dEkSz6NHEY1sjSRdIuDFWEhd8KxFqsRi1uM/nz9/zpxnwlESONdg6dKlbsaMGS4EHFHtjFIDHwKOo46l4TxSuxgDzi+rE2jg+BaFruOX4HXa0Nnf1lwAPufZeF8/r6zD97WK2qFnGjBxTw5qNGPxT+5T/r7/7RawFC3j4vTp09koCxkeHjqbHJqArmH5UrFKKksnxrK7FuRIs8STfBZv+luugXZ2pR/pP9Ois4z+TiMzUUkUjD0iEi1fzX8GmXyuxUBRcaUfykV0YZnlJGKQpOiGB76x5GeWkWWJc3mOrK6S7xdND+W5N6XyaRgtWJFe13GkaZnKOsYqGdOVVVbGupsyA/l7emTLHi7vwTdirNEt0qxnzAvBFcnQF16xh/TMpUuXHDowhlA9vQVraQhkudRdzOnK+04ZSP3DUhVSP61YsaLtd/ks7ZgtPcXqPqEafHkdqa84X6aCeL7YWlv6edGFHb+ZFICPlljHhg0bKuk0CSvVznWsotRu433alNdFrqG45ejoaPCaUkWERpLXjzFL2Rpllp7PJU2a/v7Ab8N05/9t27Z16KUqoFGsxnI9EosS2niSYg9SpU6B4JgTrvVW1flt1sT+0ADIJU2maXzcUTraGCRaL1Wp9rUMk16PMom8QhruxzvZIegJjFU7LLCePfS8uaQdPny4jTTL0dbee5mYokQsXTIWNY46kuMbnt8Kmec+LGWtOVIl9cT1rCB0V8WqkjAsRwta93TbwNYoGKsUSChN44lgBNCoHLHzquYKrU6qZ8lolCIN0Rh6cP0Q3U6I6IXILYOQI513hJaSKAorFpuHXJNfVlpRtmYBk1Su1obZr5dnKAO+L10Hrj3WZW+E3qh6IszE37F6EB+68mGpvKm4eb9bFrlzrok7fvr0Kfv727dvWRmdVTJHw0qiiCUSZ6wCK+7XL/AcsgNyL74DQQ730sv78Su7+t/A36MdY0sW5o40ahslXr58aZ5HtZB8GH64m9EmMZ7FpYw4T6QnrZfgenrhFxaSiSGXtPnz57e9TkNZLvTjeqhr734CNtrK41L40sUQckmj1lGKQ0rC37x544r8eNXRpnVE3ZZY7zXo8NomiO0ZUCj2uHz58rbXoZ6gc0uA+F6ZeKS/jhRDUq8MKrTho9fEkihMmhxtBI1DxKFY9XLpVcSkfoi8JGnToZO5sU5aiDQIW716ddt7ZLYtMQlhECdBGXZZMWldY5BHm5x
gAroWj4C0hbYkSc/jBmggIrXJWlZM6pSETsEPGqZOndr2uuuR5rF169a2HoHPdurUKZM4CO1WTPqaDaAd+GFGKdIQkxAn9RuEWcTRyN2KSUgiSgF5aWzPTeA/lN5rZubMmR2bE4SIC4nJoltgAV/dVefZm72AtctUCJU2CMJ327hxY9t7EHbkyJFseq+EJSY16RPo3Dkq1kkr7+q0bNmyDuLQcZBEPYmHVdOBiJyIlrRDq41YPWfXOxUysi5fvtyaj+2BpcnsUV/oSoEMOk2CQGlr4ckhBwaetBhjCwH0ZHtJROPJkyc7UjcYLDjmrH7ADTEBXFfOYmB0k9oYBOjJ8b4aOYSe7QkKcYhFlq3QYLQhSidNmtS2RATwy8YOM3EQJsUjKiaWZ+vZToUQgzhkHXudb/PW5YMHD9yZM2faPsMwoc7RciYJXbGuBqJ1UIGKKLv915jsvgtJxCZDubdXr165mzdvtr1Hz5LONA8jrUwKPqsmVesKa49S3Q4WxmRPUEYdTjgiUcfUwLx589ySJUva3oMkP6IYddq6HMS4o55xBJBUeRjzfa4Zdeg56QZ43LhxoyPo7Lf1kNt7oO8wWAbNwaYjIv5lhyS7kRf96dvm5Jah8vfvX3flyhX35cuX6HfzFHOToS1H4BenCaHvO8pr8iDuwoUL7tevX+b5ZdbBair0xkFIlFDlW4ZknEClsp/TzXyAKVOmmHWFVSbDNw1l1+4f90U6IY/q4V27dpnE9bJ+v87QEydjqx/UamVVPRG+mwkNTYN+9tjkwzEx+atCm/X9WvWtDtAb68Wy9LXa1UmvCDDIpPkyOQ5ZwSzJ4jMrvFcr0rSjOUh+GcT4LSg5ugkW1Io0/SCDQBojh0hPlaJdah+tkVYrnTZowP8iq1F1TgMBBauufyB33x1v+NWFYmT5KmppgHC+NkAgbmRkpD3yn9QIseXymoTQFGQmIOKTxiZIWpvAatenVqRVXf2nTrAWMsPnKrMZHz6bJq5jvce6QK8J1cQNgKxlJapMPdZSR64/UivS9NztpkVEdKcrs5alhhWP9NeqlfWopzhZScI6QxseegZRGeg5a8C3Re1Mfl1ScP36ddcUaMuv24iOJtz7sbUjTS4qBvKmstYJoUauiuD3k5qhyr7QdUHMeCgLa1Ear9NquemdXgmum4fvJ6w1lqsuDhNrg1qSpleJK7K3TF0Q2jSd94uSZ60kK1e3qyVpQK6PVWXp2/FC3mp6jBhKKOiY2h3gtUV64TWM6wDETRPLDfSakXmH3w8g9Jlug8ZtTt4kVF0kLUYYmCCtD/DrQ5YhMGbA9L3ucdjh0y8kOHW5gU/VEEmJTcL4Pz/f7mgoAbYkAAAAAElFTkSuQmCC", + }, + ], + }, + ], +}) + +const completion = await openai.completions.create({ + model: "llama3.2", + prompt: "Say this is a test.", +}) + +const listCompletion = await openai.models.list() + +const model = await openai.models.retrieve("llama3.2") + +const embedding = await openai.embeddings.create({ + model: "all-minilm", + input: ["why is the sky blue?", "why is the grass green?"], +}) +``` + +### `curl` + +```shell +curl http://localhost:11434/v1/chat/completions \ + -H "Content-Type: application/json" \ + -d '{ + "model": "llama3.2", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant." + }, + { + "role": "user", + "content": "Hello!" + } + ] + }' + +curl http://localhost:11434/v1/chat/completions \ + -H "Content-Type: application/json" \ + -d '{ + "model": "llava", + "messages": [ + { + "role": "user", + "content": [ + { + "type": "text", + "text": "What'\''s in this image?" 
+ }, + { + "type": "image_url", + "image_url": { + "url": "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAG0AAABmCAYAAADBPx+VAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAA3VSURBVHgB7Z27r0zdG8fX743i1bi1ikMoFMQloXRpKFFIqI7LH4BEQ+NWIkjQuSWCRIEoULk0gsK1kCBI0IhrQVT7tz/7zZo888yz1r7MnDl7z5xvsjkzs2fP3uu71nNfa7lkAsm7d++Sffv2JbNmzUqcc8m0adOSzZs3Z+/XES4ZckAWJEGWPiCxjsQNLWmQsWjRIpMseaxcuTKpG/7HP27I8P79e7dq1ars/yL4/v27S0ejqwv+cUOGEGGpKHR37tzJCEpHV9tnT58+dXXCJDdECBE2Ojrqjh071hpNECjx4cMHVycM1Uhbv359B2F79+51586daxN/+pyRkRFXKyRDAqxEp4yMlDDzXG1NPnnyJKkThoK0VFd1ELZu3TrzXKxKfW7dMBQ6bcuWLW2v0VlHjx41z717927ba22U9APcw7Nnz1oGEPeL3m3p2mTAYYnFmMOMXybPPXv2bNIPpFZr1NHn4HMw0KRBjg9NuRw95s8PEcz/6DZELQd/09C9QGq5RsmSRybqkwHGjh07OsJSsYYm3ijPpyHzoiacg35MLdDSIS/O1yM778jOTwYUkKNHWUzUWaOsylE00MyI0fcnOwIdjvtNdW/HZwNLGg+sR1kMepSNJXmIwxBZiG8tDTpEZzKg0GItNsosY8USkxDhD0Rinuiko2gfL/RbiD2LZAjU9zKQJj8RDR0vJBR1/Phx9+PHj9Z7REF4nTZkxzX4LCXHrV271qXkBAPGfP/atWvu/PnzHe4C97F48eIsRLZ9+3a3f/9+87dwP1JxaF7/3r17ba+5l4EcaVo0lj3SBq5kGTJSQmLWMjgYNei2GPT1MuMqGTDEFHzeQSP2wi/jGnkmPJ/nhccs44jvDAxpVcxnq0F6eT8h4ni/iIWpR5lPyA6ETkNXoSukvpJAD3AsXLiwpZs49+fPn5ke4j10TqYvegSfn0OnafC+Tv9ooA/JPkgQysqQNBzagXY55nO/oa1F7qvIPWkRL12WRpMWUvpVDYmxAPehxWSe8ZEXL20sadYIozfmNch4QJPAfeJgW3rNsnzphBKNJM2KKODo1rVOMRYik5ETy3ix4qWNI81qAAirizgMIc+yhTytx0JWZuNI03qsrgWlGtwjoS9XwgUhWGyhUaRZZQNNIEwCiXD16tXcAHUs79co0vSD8rrJCIW98pzvxpAWyyo3HYwqS0+H0BjStClcZJT5coMm6D2LOF8TolGJtK9fvyZpyiC5ePFi9nc/oJU4eiEP0jVoAnHa9wyJycITMP78+eMeP37sXrx44d6+fdt6f82aNdkx1pg9e3Zb5W+RSRE+n+VjksQWifvVaTKFhn5O8my63K8Qabdv33b379/PiAP//vuvW7BggZszZ072/+TJk91YgkafPn166zXB1rQHFvouAWHq9z3SEevSUerqCn2/dDCeta2jxYbr69evk4MHDyY7d+7MjhMnTiTPnz9Pfv/+nfQT2ggpO2dMF8cghuoM7Ygj5iWCqRlGFml0QC/ftGmTmzt3rmsaKDsgBSPh0/8yPeLLBihLkOKJc0jp8H8vUzcxIA1k6QJ/c78tWEyj5P3o4u9+jywNPdJi5rAH9x0KHcl4Hg570eQp3+vHXGyrmEeigzQsQsjavXt38ujRo44LQuDDhw+TW7duRS1HGgMxhNXHgflaNTOsHyKvHK5Ijo2jbFjJBQK9YwFd6RVMzfgRBmEfP37suBBm/p49e1qjEP2mwTViNRo0VJWH1deMXcNK08uUjVUu7s/zRaL+oLNxz1bpANco4npUgX4G2eFbpDFyQoQxojBCpEGSytmOH8qrH5Q9vuzD6ofQylkCUmh8DBAr+q8JCyVNtWQIidKQE9wNtLSQnS4jDSsxNHogzFuQBw4cyM61UKVsjfr3ooBkPSqqQHesUPWVtzi9/vQi1T+rJj7WiTz4Pt/l3LxUkr5P2VYZaZ4URpsE+st/dujQoaBBYokbrz/8TJNQYLSonrPS9kUaSkPeZyj1AWSj+d+VBoy1pIWVNed8P0Ll/ee5HdGRhrHhR5GGN0r4LGZBaj8oFDJitBTJzIZgFcmU0Y8ytWMZMzJOaXUSrUs5RxKnrxmbb5YXO9VGUhtpXldhEUogFr3IzIsvlpmdosVcGVGXFWp2oU9kLFL3dEkSz6NHEY1sjSRdIuDFWEhd8KxFqsRi1uM/nz9/zpxnwlESONdg6dKlbsaMGS4EHFHtjFIDHwKOo46l4TxSuxgDzi+rE2jg+BaFruOX4HXa0Nnf1lwAPufZeF8/r6zD97WK2qFnGjBxTw5qNGPxT+5T/r7/7RawFC3j4vTp09koCxkeHjqbHJqArmH5UrFKKksnxrK7FuRIs8STfBZv+luugXZ2pR/pP9Ois4z+TiMzUUkUjD0iEi1fzX8GmXyuxUBRcaUfykV0YZnlJGKQpOiGB76x5GeWkWWJc3mOrK6S7xdND+W5N6XyaRgtWJFe13GkaZnKOsYqGdOVVVbGupsyA/l7emTLHi7vwTdirNEt0qxnzAvBFcnQF16xh/TMpUuXHDowhlA9vQVraQhkudRdzOnK+04ZSP3DUhVSP61YsaLtd/ks7ZgtPcXqPqEafHkdqa84X6aCeL7YWlv6edGFHb+ZFICPlljHhg0bKuk0CSvVznWsotRu433alNdFrqG45ejoaPCaUkWERpLXjzFL2Rpllp7PJU2a/v7Ab8N05/9t27Z16KUqoFGsxnI9EosS2niSYg9SpU6B4JgTrvVW1flt1sT+0ADIJU2maXzcUTraGCRaL1Wp9rUMk16PMom8QhruxzvZIegJjFU7LLCePfS8uaQdPny4jTTL0dbee5mYokQsXTIWNY46kuMbnt8Kmec+LGWtOVIl9cT1rCB0V8WqkjAsRwta93TbwNYoGKsUSChN44lgBNCoHLHzquYKrU6qZ8lolCIN0Rh6cP0Q3U6I6IXILYOQI513hJaSKAorFpuHXJNfVlpRtmYBk1Su1obZr5dnKAO+L10Hrj3WZW+E3qh6IszE37F6EB+68mGpvKm4eb9bFrlzrok7fvr0Kfv727dvWRmdVTJHw0qiiCUSZ6wCK+7XL/AcsgNyL74DQQ730sv78Su7+t/A36MdY0sW5o40ahslXr58aZ5HtZB8GH64m9EmMZ7FpYw4T6QnrZfgenrhFxaSiSGXtPnz57e9TkNZLvTjeqhr734CNtrK41L40sUQckmj1lGKQ0rC37x544r8eNXRpnVE3ZZY7zXo8NomiO0ZUCj2uHz58rbXoZ6gc0uA+F6ZeKS/jhRDUq8MKrTho9fEkihMmhxtBI1DxKFY9XLpVcSkfoi8JGnToZO5sU5aiDQIW716ddt7ZLYtMQlhEC
dBGXZZMWldY5BHm5xgAroWj4C0hbYkSc/jBmggIrXJWlZM6pSETsEPGqZOndr2uuuR5rF169a2HoHPdurUKZM4CO1WTPqaDaAd+GFGKdIQkxAn9RuEWcTRyN2KSUgiSgF5aWzPTeA/lN5rZubMmR2bE4SIC4nJoltgAV/dVefZm72AtctUCJU2CMJ327hxY9t7EHbkyJFseq+EJSY16RPo3Dkq1kkr7+q0bNmyDuLQcZBEPYmHVdOBiJyIlrRDq41YPWfXOxUysi5fvtyaj+2BpcnsUV/oSoEMOk2CQGlr4ckhBwaetBhjCwH0ZHtJROPJkyc7UjcYLDjmrH7ADTEBXFfOYmB0k9oYBOjJ8b4aOYSe7QkKcYhFlq3QYLQhSidNmtS2RATwy8YOM3EQJsUjKiaWZ+vZToUQgzhkHXudb/PW5YMHD9yZM2faPsMwoc7RciYJXbGuBqJ1UIGKKLv915jsvgtJxCZDubdXr165mzdvtr1Hz5LONA8jrUwKPqsmVesKa49S3Q4WxmRPUEYdTjgiUcfUwLx589ySJUva3oMkP6IYddq6HMS4o55xBJBUeRjzfa4Zdeg56QZ43LhxoyPo7Lf1kNt7oO8wWAbNwaYjIv5lhyS7kRf96dvm5Jah8vfvX3flyhX35cuX6HfzFHOToS1H4BenCaHvO8pr8iDuwoUL7tevX+b5ZdbBair0xkFIlFDlW4ZknEClsp/TzXyAKVOmmHWFVSbDNw1l1+4f90U6IY/q4V27dpnE9bJ+v87QEydjqx/UamVVPRG+mwkNTYN+9tjkwzEx+atCm/X9WvWtDtAb68Wy9LXa1UmvCDDIpPkyOQ5ZwSzJ4jMrvFcr0rSjOUh+GcT4LSg5ugkW1Io0/SCDQBojh0hPlaJdah+tkVYrnTZowP8iq1F1TgMBBauufyB33x1v+NWFYmT5KmppgHC+NkAgbmRkpD3yn9QIseXymoTQFGQmIOKTxiZIWpvAatenVqRVXf2nTrAWMsPnKrMZHz6bJq5jvce6QK8J1cQNgKxlJapMPdZSR64/UivS9NztpkVEdKcrs5alhhWP9NeqlfWopzhZScI6QxseegZRGeg5a8C3Re1Mfl1ScP36ddcUaMuv24iOJtz7sbUjTS4qBvKmstYJoUauiuD3k5qhyr7QdUHMeCgLa1Ear9NquemdXgmum4fvJ6w1lqsuDhNrg1qSpleJK7K3TF0Q2jSd94uSZ60kK1e3qyVpQK6PVWXp2/FC3mp6jBhKKOiY2h3gtUV64TWM6wDETRPLDfSakXmH3w8g9Jlug8ZtTt4kVF0kLUYYmCCtD/DrQ5YhMGbA9L3ucdjh0y8kOHW5gU/VEEmJTcL4Pz/f7mgoAbYkAAAAAElFTkSuQmCC" + } + } + ] + } + ], + "max_tokens": 300 + }' + +curl http://localhost:11434/v1/completions \ + -H "Content-Type: application/json" \ + -d '{ + "model": "llama3.2", + "prompt": "Say this is a test" + }' + +curl http://localhost:11434/v1/models + +curl http://localhost:11434/v1/models/llama3.2 + +curl http://localhost:11434/v1/embeddings \ + -H "Content-Type: application/json" \ + -d '{ + "model": "all-minilm", + "input": ["why is the sky blue?", "why is the grass green?"] + }' +``` + +## Endpoints + +### `/v1/chat/completions` + +#### Supported features + +- [x] Chat completions +- [x] Streaming +- [x] JSON mode +- [x] Reproducible outputs +- [x] Vision +- [x] Tools +- [ ] Logprobs + +#### Supported request fields + +- [x] `model` +- [x] `messages` + - [x] Text `content` + - [x] Image `content` + - [x] Base64 encoded image + - [ ] Image URL + - [x] Array of `content` parts +- [x] `frequency_penalty` +- [x] `presence_penalty` +- [x] `response_format` +- [x] `seed` +- [x] `stop` +- [x] `stream` +- [x] `stream_options` + - [x] `include_usage` +- [x] `temperature` +- [x] `top_p` +- [x] `max_tokens` +- [x] `tools` +- [ ] `tool_choice` +- [ ] `logit_bias` +- [ ] `user` +- [ ] `n` + +### `/v1/completions` + +#### Supported features + +- [x] Completions +- [x] Streaming +- [x] JSON mode +- [x] Reproducible outputs +- [ ] Logprobs + +#### Supported request fields + +- [x] `model` +- [x] `prompt` +- [x] `frequency_penalty` +- [x] `presence_penalty` +- [x] `seed` +- [x] `stop` +- [x] `stream` +- [x] `stream_options` + - [x] `include_usage` +- [x] `temperature` +- [x] `top_p` +- [x] `max_tokens` +- [x] `suffix` +- [ ] `best_of` +- [ ] `echo` +- [ ] `logit_bias` +- [ ] `user` +- [ ] `n` + +#### Notes + +- `prompt` currently only accepts a string + +### `/v1/models` + +#### Notes + +- `created` corresponds to when the model was last modified +- `owned_by` corresponds to the ollama username, defaulting to `"library"` + +### `/v1/models/{model}` + +#### Notes + +- `created` corresponds to when the model was last modified +- `owned_by` corresponds to the ollama username, defaulting to `"library"` + +### `/v1/embeddings` + +#### Supported 
request fields + +- [x] `model` +- [x] `input` + - [x] string + - [x] array of strings + - [ ] array of tokens + - [ ] array of token arrays +- [ ] `encoding format` +- [ ] `dimensions` +- [ ] `user` + +## Models + +Before using a model, pull it locally `ollama pull`: + +```shell +ollama pull llama3.2 +``` + +### Default model names + +For tooling that relies on default OpenAI model names such as `gpt-3.5-turbo`, use `ollama cp` to copy an existing model name to a temporary name: + +```shell +ollama cp llama3.2 gpt-3.5-turbo +``` + +Afterwards, this new model name can be specified the `model` field: + +```shell +curl http://localhost:11434/v1/chat/completions \ + -H "Content-Type: application/json" \ + -d '{ + "model": "gpt-3.5-turbo", + "messages": [ + { + "role": "user", + "content": "Hello!" + } + ] + }' +``` + +### Setting the context size + +The OpenAI API does not have a way of setting the context size for a model. If you need to change the context size, create a `Modelfile` which looks like: + +``` +FROM +PARAMETER num_ctx +``` + +Use the `ollama create mymodel` command to create a new model with the updated context size. Call the API with the updated model name: + +```shell +curl http://localhost:11434/v1/chat/completions \ + -H "Content-Type: application/json" \ + -d '{ + "model": "mymodel", + "messages": [ + { + "role": "user", + "content": "Hello!" + } + ] + }' +``` diff --git a/docs/template.md b/docs/template.md new file mode 100644 index 0000000..636d72f --- /dev/null +++ b/docs/template.md @@ -0,0 +1,167 @@ +# Template + +Ollama provides a powerful templating engine backed by Go's built-in templating engine to construct prompts for your large language model. This feature is a valuable tool to get the most out of your models. + +## Basic Template Structure + +A basic Go template consists of three main parts: + +* **Layout**: The overall structure of the template. +* **Variables**: Placeholders for dynamic data that will be replaced with actual values when the template is rendered. +* **Functions**: Custom functions or logic that can be used to manipulate the template's content. + +Here's an example of a simple chat template: + +```go +{{- range .Messages }} +{{ .Role }}: {{ .Content }} +{{- end }} +``` + +In this example, we have: + +* A basic messages structure (layout) +* Three variables: `Messages`, `Role`, and `Content` (variables) +* A custom function (action) that iterates over an array of items (`range .Messages`) and displays each item + +## Adding templates to your model + +By default, models imported into Ollama have a default template of `{{ .Prompt }}`, i.e. user inputs are sent verbatim to the LLM. This is appropriate for text or code completion models but lacks essential markers for chat or instruction models. + +Omitting a template in these models puts the responsibility of correctly templating input onto the user. Adding a template allows users to easily get the best results from the model. + +To add templates in your model, you'll need to add a `TEMPLATE` command to the Modelfile. Here's an example using Meta's Llama 3. 
+ +```dockerfile +FROM llama3.2 + +TEMPLATE """{{- if .System }}<|start_header_id|>system<|end_header_id|> + +{{ .System }}<|eot_id|> +{{- end }} +{{- range .Messages }}<|start_header_id|>{{ .Role }}<|end_header_id|> + +{{ .Content }}<|eot_id|> +{{- end }}<|start_header_id|>assistant<|end_header_id|> + +""" +``` + +## Variables + +`System` (string): system prompt + +`Prompt` (string): user prompt + +`Response` (string): assistant response + +`Suffix` (string): text inserted after the assistant's response + +`Messages` (list): list of messages + +`Messages[].Role` (string): role which can be one of `system`, `user`, `assistant`, or `tool` + +`Messages[].Content` (string): message content + +`Messages[].ToolCalls` (list): list of tools the model wants to call + +`Messages[].ToolCalls[].Function` (object): function to call + +`Messages[].ToolCalls[].Function.Name` (string): function name + +`Messages[].ToolCalls[].Function.Arguments` (map): mapping of argument name to argument value + +`Tools` (list): list of tools the model can access + +`Tools[].Type` (string): schema type. `type` is always `function` + +`Tools[].Function` (object): function definition + +`Tools[].Function.Name` (string): function name + +`Tools[].Function.Description` (string): function description + +`Tools[].Function.Parameters` (object): function parameters + +`Tools[].Function.Parameters.Type` (string): schema type. `type` is always `object` + +`Tools[].Function.Parameters.Required` (list): list of required properties + +`Tools[].Function.Parameters.Properties` (map): mapping of property name to property definition + +`Tools[].Function.Parameters.Properties[].Type` (string): property type + +`Tools[].Function.Parameters.Properties[].Description` (string): property description + +`Tools[].Function.Parameters.Properties[].Enum` (list): list of valid values + +## Tips and Best Practices + +Keep the following tips and best practices in mind when working with Go templates: + +* **Be mindful of dot**: Control flow structures like `range` and `with` changes the value `.` +* **Out-of-scope variables**: Use `$.` to reference variables not currently in scope, starting from the root +* **Whitespace control**: Use `-` to trim leading (`{{-`) and trailing (`-}}`) whitespace + +## Examples + +### Example Messages + +#### ChatML + +ChatML is a popular template format. It can be used for models such as Databrick's DBRX, Intel's Neural Chat, and Microsoft's Orca 2. + +```go +{{- range .Messages }}<|im_start|>{{ .Role }} +{{ .Content }}<|im_end|> +{{ end }}<|im_start|>assistant +``` + +### Example Tools + +Tools support can be added to a model by adding a `{{ .Tools }}` node to the template. This feature is useful for models trained to call external tools and can a powerful tool for retrieving real-time data or performing complex tasks. + +#### Mistral + +Mistral v0.3 and Mixtral 8x22B supports tool calling. 
+ +```go +{{- range $index, $_ := .Messages }} +{{- if eq .Role "user" }} +{{- if and (le (len (slice $.Messages $index)) 2) $.Tools }}[AVAILABLE_TOOLS] {{ json $.Tools }}[/AVAILABLE_TOOLS] +{{- end }}[INST] {{ if and (eq (len (slice $.Messages $index)) 1) $.System }}{{ $.System }} + +{{ end }}{{ .Content }}[/INST] +{{- else if eq .Role "assistant" }} +{{- if .Content }} {{ .Content }} +{{- else if .ToolCalls }}[TOOL_CALLS] [ +{{- range .ToolCalls }}{"name": "{{ .Function.Name }}", "arguments": {{ json .Function.Arguments }}} +{{- end }}] +{{- end }} +{{- else if eq .Role "tool" }}[TOOL_RESULTS] {"content": {{ .Content }}}[/TOOL_RESULTS] +{{- end }} +{{- end }} +``` + +### Example Fill-in-Middle + +Fill-in-middle support can be added to a model by adding a `{{ .Suffix }}` node to the template. This feature is useful for models that are trained to generate text in the middle of user input, such as code completion models. + +#### CodeLlama + +CodeLlama [7B](https://ollama.com/library/codellama:7b-code) and [13B](https://ollama.com/library/codellama:13b-code) code completion models support fill-in-middle. + +```go +
+<PRE> {{ .Prompt }} <SUF>{{ .Suffix }} <MID>
+```
+
+> [!NOTE]
+> CodeLlama 34B and 70B code completion and all instruct and Python fine-tuned models do not support fill-in-middle.
+
+#### Codestral
+
+Codestral [22B](https://ollama.com/library/codestral:22b) supports fill-in-middle.
+
+```go
+[SUFFIX]{{ .Suffix }}[PREFIX] {{ .Prompt }}
+```
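+
+As a rough usage sketch (assuming you have pulled one of the models above locally, e.g. `codellama:7b-code`), the `suffix` field of the generate API exercises the fill-in-middle template:
+
+```shell
+curl http://localhost:11434/api/generate -d '{
+  "model": "codellama:7b-code",
+  "prompt": "def add(a, b):\n",
+  "suffix": "    return c\n",
+  "stream": false
+}'
+```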
diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md
new file mode 100644
index 0000000..ba5487f
--- /dev/null
+++ b/docs/troubleshooting.md
@@ -0,0 +1,115 @@
+# How to troubleshoot issues
+
+Sometimes Ollama may not perform as expected. One of the best ways to figure out what happened is to take a look at the logs. Find the logs on **Mac** by running the command:
+
+```shell
+cat ~/.ollama/logs/server.log
+```
+
+On **Linux** systems with systemd, the logs can be found with this command:
+
+```shell
+journalctl -u ollama --no-pager --follow --pager-end 
+```
+
+When you run Ollama in a **container**, the logs go to stdout/stderr in the container:
+
+```shell
+docker logs <container-name>
+```
+
+(Use `docker ps` to find the container name)
+
+If you run `ollama serve` manually in a terminal, the logs will appear in that terminal.
+
+When you run Ollama on **Windows**, logs are stored in a few different locations. You can view them in the explorer window by pressing `Win+R` and typing in:
+- `explorer %LOCALAPPDATA%\Ollama` to view logs.  The most recent server logs will be in `server.log` and older logs will be in `server-#.log` 
+- `explorer %LOCALAPPDATA%\Programs\Ollama` to browse the binaries (The installer adds this to your user PATH)
+- `explorer %HOMEPATH%\.ollama` to browse where models and configuration are stored
+
+To enable additional debug logging to help troubleshoot problems, first **quit the running app from the tray menu**, then run the following in a PowerShell terminal:
+
+```powershell
+$env:OLLAMA_DEBUG="1"
+& "ollama app.exe"
+```
+
+Join the [Discord](https://discord.gg/ollama) for help interpreting the logs.
+
+## LLM libraries
+
+Ollama includes multiple LLM libraries compiled for different GPUs and CPU vector features. Ollama tries to pick the best one based on the capabilities of your system. If this autodetection has problems, or you run into other problems (e.g. crashes in your GPU), you can work around this by forcing a specific LLM library. `cpu_avx2` will perform the best, followed by `cpu_avx`; the slowest but most compatible is `cpu`. Rosetta emulation under macOS will work with the `cpu` library.
+
+In the server log, you will see a message that looks something like this (varies from release to release):
+
+```
+Dynamic LLM libraries [rocm_v6 cpu cpu_avx cpu_avx2 cuda_v11 rocm_v5]
+```
+
+**Experimental LLM Library Override**
+
+You can set `OLLAMA_LLM_LIBRARY` to any of the available LLM libraries to bypass autodetection. For example, if you have a CUDA card but want to force the CPU LLM library with AVX2 vector support, use:
+
+```shell
+OLLAMA_LLM_LIBRARY="cpu_avx2" ollama serve
+```
+
+You can see what features your CPU has with the following command:
+
+```shell
+cat /proc/cpuinfo | grep flags | head -1
+```
+
+## Installing older or pre-release versions on Linux
+
+If you run into problems on Linux and want to install an older version, or you'd like to try out a pre-release before it's officially released, you can tell the install script which version to install.
+
+```shell
+curl -fsSL https://ollama.com/install.sh | OLLAMA_VERSION=0.5.7 sh
+```
+
+## Linux docker
+
+If Ollama initially works on the GPU in a docker container, but then switches to running on CPU after some period of time, with errors in the server log reporting GPU discovery failures, this can be resolved by disabling systemd cgroup management in Docker. Edit `/etc/docker/daemon.json` on the host and add `"exec-opts": ["native.cgroupdriver=cgroupfs"]` to the docker configuration, as in the sketch below.
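+
+A minimal sketch of that change, assuming `/etc/docker/daemon.json` has no other options you need to preserve:
+
+```shell
+# overwrite the docker daemon config with the cgroupfs driver, then restart docker
+sudo tee /etc/docker/daemon.json <<'EOF'
+{
+  "exec-opts": ["native.cgroupdriver=cgroupfs"]
+}
+EOF
+sudo systemctl restart docker
+```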
+
+## NVIDIA GPU Discovery
+
+When Ollama starts up, it takes inventory of the GPUs present in the system to determine compatibility and how much VRAM is available.  Sometimes this discovery can fail to find your GPUs.  In general, running the latest driver will yield the best results.
+
+### Linux NVIDIA Troubleshooting
+
+If you are using a container to run Ollama, make sure you've set up the container runtime first as described in [docker.md](./docker.md).
+
+Sometimes Ollama can have difficulty initializing the GPU. When you check the server logs, this can show up as various error codes, such as "3" (not initialized), "46" (device unavailable), "100" (no device), "999" (unknown), or others. The following troubleshooting techniques may help resolve the problem:
+
+- If you are using a container, is the container runtime working?  Try `docker run --gpus all ubuntu nvidia-smi` - if this doesn't work, Ollama won't be able to see your NVIDIA GPU.
+- Is the uvm driver loaded? `sudo nvidia-modprobe -u`
+- Try reloading the nvidia_uvm driver - `sudo rmmod nvidia_uvm` then `sudo modprobe nvidia_uvm`
+- Try rebooting
+- Make sure you're running the latest nvidia drivers
+
+If none of those resolve the problem, gather additional information and file an issue:
+- Set `CUDA_ERROR_LEVEL=50` and try again to get more diagnostic logs
+- Check dmesg for any errors `sudo dmesg | grep -i nvrm` and `sudo dmesg | grep -i nvidia`
+
+
+## AMD GPU Discovery
+
+On Linux, AMD GPU access typically requires `video` and/or `render` group membership to access the `/dev/kfd` device. If permissions are not set up correctly, Ollama will detect this and report an error in the server log.
+
+When running in a container, the ollama process may be unable to access the GPU on some Linux distributions and container runtimes. Use `ls -lnd /dev/kfd /dev/dri /dev/dri/*` on the host system to determine the **numeric** group IDs on your system, and pass additional `--group-add ...` arguments to the container so it can access the required devices. For example, in the following output `crw-rw---- 1 0  44 226,   0 Sep 16 16:55 /dev/dri/card0` the group ID column is `44`.
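+
+A sketch of such a container invocation (substitute the numeric group IDs reported on your host; `44` matches the example output above and `<render-gid>` is a placeholder):
+
+```shell
+# pass both device nodes and the numeric video/render group IDs into the container
+docker run -d --device /dev/kfd --device /dev/dri \
+  --group-add 44 --group-add <render-gid> \
+  -v ollama:/root/.ollama -p 11434:11434 \
+  --name ollama ollama/ollama:rocm
+```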
+
+If you are experiencing problems getting Ollama to correctly discover or use your GPU for inference, the following may help isolate the failure:
+- `AMD_LOG_LEVEL=3` Enable info log levels in the AMD HIP/ROCm libraries.  This can help show more detailed error codes that can help troubleshoot problems
+- `OLLAMA_DEBUG=1` During GPU discovery additional information will be reported
+- Check dmesg for any errors from amdgpu or kfd drivers `sudo dmesg | grep -i amdgpu` and `sudo dmesg | grep -i kfd`
+
+## Multiple AMD GPUs
+
+If you experience gibberish responses when models load across multiple AMD GPUs on Linux, see the following guide.
+
+- https://rocm.docs.amd.com/projects/radeon/en/latest/docs/install/native_linux/mgpu.html#mgpu-known-issues-and-limitations
+
+## Windows Terminal Errors
+
+Older versions of Windows 10 (e.g., 21H1) are known to have a bug where the standard terminal program does not display control characters correctly. This can result in long strings of characters like `←[?25h←[?25l` being displayed, sometimes with the error `The parameter is incorrect`. To resolve this problem, please update to Win 10 22H1 or newer.
diff --git a/docs/windows.md b/docs/windows.md
new file mode 100644
index 0000000..0bffa4b
--- /dev/null
+++ b/docs/windows.md
@@ -0,0 +1,90 @@
+# Ollama Windows
+
+Welcome to Ollama for Windows.
+
+No more WSL required!
+
+Ollama now runs as a native Windows application, including NVIDIA and AMD Radeon GPU support.
+After installing Ollama for Windows, Ollama will run in the background and
+the `ollama` command line is available in `cmd`, `powershell` or your favorite
+terminal application. As usual, the Ollama [api](./api.md) will be served on
+`http://localhost:11434`.
+
+## System Requirements
+
+* Windows 10 22H2 or newer, Home or Pro
+* NVIDIA 452.39 or newer drivers if you have an NVIDIA card
+* AMD Radeon drivers from https://www.amd.com/en/support if you have a Radeon card
+
+Ollama uses Unicode characters for progress indication, which may render as unknown squares in some older terminal fonts in Windows 10. If you see this, try changing your terminal font settings.
+
+## Filesystem Requirements
+
+The Ollama install does not require Administrator, and installs in your home directory by default.  You'll need at least 4GB of space for the binary install.  Once you've installed Ollama, you'll need additional space for storing the Large Language models, which can be tens to hundreds of GB in size.  If your home directory doesn't have enough space, you can change where the binaries are installed, and where the models are stored.
+
+### Changing Install Location
+
+To install the Ollama application in a location different from your home directory, start the installer with the following flag:
+
+```powershell
+OllamaSetup.exe /DIR="d:\some\location"
+```
+
+### Changing Model Location
+
+To change where Ollama stores the downloaded models instead of using your home directory, set the environment variable `OLLAMA_MODELS` in your user account.
+
+1. Start the Settings (Windows 11) or Control Panel (Windows 10) application and search for _environment variables_.
+
+2. Click on _Edit environment variables for your account_.
+
+3. Edit or create a new variable for your user account named `OLLAMA_MODELS`, set to the directory where you want the models stored.
+
+4. Click OK/Apply to save.
+
+If Ollama is already running, quit the tray application and relaunch it from the Start menu, or from a new terminal started after you saved the environment variables.
+
+## API Access
+
+Here's a quick example showing API access from `powershell`:
+
+```powershell
+(Invoke-WebRequest -method POST -Body '{"model":"llama3.2", "prompt":"Why is the sky blue?", "stream": false}' -uri http://localhost:11434/api/generate ).Content | ConvertFrom-json
+```
+
+## Troubleshooting
+
+Ollama on Windows stores files in a few different locations. You can view them in
+the explorer window by pressing `Win+R` and typing in:
+- `explorer %LOCALAPPDATA%\Ollama` contains logs and downloaded updates
+    - *app.log* contains the most recent logs from the GUI application
+    - *server.log* contains the most recent server logs
+    - *upgrade.log* contains log output for upgrades
+- `explorer %LOCALAPPDATA%\Programs\Ollama` contains the binaries (The installer adds this to your user PATH)
+- `explorer %HOMEPATH%\.ollama` contains models and configuration
+
+## Uninstall
+
+The Ollama Windows installer registers an Uninstaller application.  Under `Add or remove programs` in Windows Settings, you can uninstall Ollama.
+
+> [!NOTE]
+> If you have [changed the OLLAMA_MODELS location](#changing-model-location), the installer will not remove your downloaded models.
+
+
+## Standalone CLI
+
+The easiest way to install Ollama on Windows is to use the `OllamaSetup.exe`
+installer. It installs in your account without requiring Administrator rights.
+We update Ollama regularly to support the latest models, and this installer will
+help you keep up to date.
+
+If you'd like to install or integrate Ollama as a service, a standalone
+`ollama-windows-amd64.zip` zip file is available containing only the Ollama CLI
+and GPU library dependencies for Nvidia.  If you have an AMD GPU, also download
+and extract the additional ROCm package `ollama-windows-amd64-rocm.zip` into the
+same directory.  This allows for embedding Ollama in existing applications, or
+running it as a system service via `ollama serve` with tools such as
+[NSSM](https://nssm.cc/). 
+
+> [!NOTE]  
+> If you are upgrading from a prior version, you should remove the old directories first.
diff --git a/envconfig/config.go b/envconfig/config.go
new file mode 100644
index 0000000..9d7c2e2
--- /dev/null
+++ b/envconfig/config.go
@@ -0,0 +1,308 @@
+package envconfig
+
+import (
+	"fmt"
+	"log/slog"
+	"math"
+	"net"
+	"net/url"
+	"os"
+	"path/filepath"
+	"runtime"
+	"strconv"
+	"strings"
+	"time"
+)
+
+// Host returns the scheme and host. Host can be configured via the OLLAMA_HOST environment variable.
+// Default is scheme "http" and host "127.0.0.1:11434"
+func Host() *url.URL {
+	defaultPort := "11434"
+
+	s := strings.TrimSpace(Var("OLLAMA_HOST"))
+	scheme, hostport, ok := strings.Cut(s, "://")
+	switch {
+	case !ok:
+		scheme, hostport = "http", s
+	case scheme == "http":
+		defaultPort = "80"
+	case scheme == "https":
+		defaultPort = "443"
+	}
+
+	hostport, path, _ := strings.Cut(hostport, "/")
+	host, port, err := net.SplitHostPort(hostport)
+	if err != nil {
+		host, port = "127.0.0.1", defaultPort
+		if ip := net.ParseIP(strings.Trim(hostport, "[]")); ip != nil {
+			host = ip.String()
+		} else if hostport != "" {
+			host = hostport
+		}
+	}
+
+	if n, err := strconv.ParseInt(port, 10, 32); err != nil || n > 65535 || n < 0 {
+		slog.Warn("invalid port, using default", "port", port, "default", defaultPort)
+		port = defaultPort
+	}
+
+	return &url.URL{
+		Scheme: scheme,
+		Host:   net.JoinHostPort(host, port),
+		Path:   path,
+	}
+}
+
+// AllowedOrigins returns a list of allowed origins. AllowedOrigins can be configured via the OLLAMA_ORIGINS environment variable.
+func AllowedOrigins() (origins []string) {
+	if s := Var("OLLAMA_ORIGINS"); s != "" {
+		origins = strings.Split(s, ",")
+	}
+
+	for _, origin := range []string{"localhost", "127.0.0.1", "0.0.0.0"} {
+		origins = append(origins,
+			fmt.Sprintf("http://%s", origin),
+			fmt.Sprintf("https://%s", origin),
+			fmt.Sprintf("http://%s", net.JoinHostPort(origin, "*")),
+			fmt.Sprintf("https://%s", net.JoinHostPort(origin, "*")),
+		)
+	}
+
+	origins = append(origins,
+		"app://*",
+		"file://*",
+		"tauri://*",
+		"vscode-webview://*",
+		"vscode-file://*",
+	)
+
+	return origins
+}
+
+// Models returns the path to the models directory. Models directory can be configured via the OLLAMA_MODELS environment variable.
+// Default is $HOME/.ollama/models
+func Models() string {
+	if s := Var("OLLAMA_MODELS"); s != "" {
+		return s
+	}
+
+	home, err := os.UserHomeDir()
+	if err != nil {
+		panic(err)
+	}
+
+	return filepath.Join(home, ".ollama", "models")
+}
+
+// KeepAlive returns the duration that models stay loaded in memory. KeepAlive can be configured via the OLLAMA_KEEP_ALIVE environment variable.
+// Negative values are treated as infinite. Zero is treated as no keep alive.
+// Default is 5 minutes.
+func KeepAlive() (keepAlive time.Duration) {
+	keepAlive = 5 * time.Minute
+	if s := Var("OLLAMA_KEEP_ALIVE"); s != "" {
+		if d, err := time.ParseDuration(s); err == nil {
+			keepAlive = d
+		} else if n, err := strconv.ParseInt(s, 10, 64); err == nil {
+			keepAlive = time.Duration(n) * time.Second
+		}
+	}
+
+	if keepAlive < 0 {
+		return time.Duration(math.MaxInt64)
+	}
+
+	return keepAlive
+}
+
+// LoadTimeout returns the duration for stall detection during model loads. LoadTimeout can be configured via the OLLAMA_LOAD_TIMEOUT environment variable.
+// Zero or Negative values are treated as infinite.
+// Default is 5 minutes.
+func LoadTimeout() (loadTimeout time.Duration) {
+	loadTimeout = 5 * time.Minute
+	if s := Var("OLLAMA_LOAD_TIMEOUT"); s != "" {
+		if d, err := time.ParseDuration(s); err == nil {
+			loadTimeout = d
+		} else if n, err := strconv.ParseInt(s, 10, 64); err == nil {
+			loadTimeout = time.Duration(n) * time.Second
+		}
+	}
+
+	if loadTimeout <= 0 {
+		return time.Duration(math.MaxInt64)
+	}
+
+	return loadTimeout
+}
+
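+// Bool returns a function that reports whether the environment variable k is set to a truthy value.
+// Empty or unset values are false; non-empty values that fail to parse as a bool are treated as true.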
+func Bool(k string) func() bool {
+	return func() bool {
+		if s := Var(k); s != "" {
+			b, err := strconv.ParseBool(s)
+			if err != nil {
+				return true
+			}
+
+			return b
+		}
+
+		return false
+	}
+}
+
+// LogLevel returns the log level for the application.
+// Values are 0 or false INFO (Default), 1 or true DEBUG, 2 TRACE
+func LogLevel() slog.Level {
+	level := slog.LevelInfo
+	if s := Var("OLLAMA_DEBUG"); s != "" {
+		if b, _ := strconv.ParseBool(s); b {
+			level = slog.LevelDebug
+		} else if i, _ := strconv.ParseInt(s, 10, 64); i != 0 {
+			level = slog.Level(i * -4)
+		}
+	}
+
+	return level
+}
+
+var (
+	// FlashAttention enables the experimental flash attention feature.
+	FlashAttention = Bool("OLLAMA_FLASH_ATTENTION")
+	// KvCacheType is the quantization type for the K/V cache.
+	KvCacheType = String("OLLAMA_KV_CACHE_TYPE")
+	// NoHistory disables readline history.
+	NoHistory = Bool("OLLAMA_NOHISTORY")
+	// NoPrune disables pruning of model blobs on startup.
+	NoPrune = Bool("OLLAMA_NOPRUNE")
+	// SchedSpread allows scheduling models across all GPUs.
+	SchedSpread = Bool("OLLAMA_SCHED_SPREAD")
+	// IntelGPU enables experimental Intel GPU detection.
+	IntelGPU = Bool("OLLAMA_INTEL_GPU")
+	// MultiUserCache optimizes prompt caching for multi-user scenarios
+	MultiUserCache = Bool("OLLAMA_MULTIUSER_CACHE")
+	// Enable the new Ollama engine
+	NewEngine = Bool("OLLAMA_NEW_ENGINE")
+	// ContextLength sets the default context length
+	ContextLength = Uint("OLLAMA_CONTEXT_LENGTH", 4096)
+)
+
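+// String returns a function that reads the environment variable s, stripped of surrounding quotes and whitespace (see Var).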
+func String(s string) func() string {
+	return func() string {
+		return Var(s)
+	}
+}
+
+var (
+	LLMLibrary = String("OLLAMA_LLM_LIBRARY")
+
+	CudaVisibleDevices    = String("CUDA_VISIBLE_DEVICES")
+	HipVisibleDevices     = String("HIP_VISIBLE_DEVICES")
+	RocrVisibleDevices    = String("ROCR_VISIBLE_DEVICES")
+	GpuDeviceOrdinal      = String("GPU_DEVICE_ORDINAL")
+	HsaOverrideGfxVersion = String("HSA_OVERRIDE_GFX_VERSION")
+)
+
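+// Uint returns a function that parses the environment variable key as an unsigned integer,
+// logging a warning and falling back to defaultValue when the value is invalid, or returning defaultValue when it is unset.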
+func Uint(key string, defaultValue uint) func() uint {
+	return func() uint {
+		if s := Var(key); s != "" {
+			if n, err := strconv.ParseUint(s, 10, 64); err != nil {
+				slog.Warn("invalid environment variable, using default", "key", key, "value", s, "default", defaultValue)
+			} else {
+				return uint(n)
+			}
+		}
+
+		return defaultValue
+	}
+}
+
+var (
+	// NumParallel sets the number of parallel model requests. NumParallel can be configured via the OLLAMA_NUM_PARALLEL environment variable.
+	NumParallel = Uint("OLLAMA_NUM_PARALLEL", 0)
+	// MaxRunners sets the maximum number of loaded models. MaxRunners can be configured via the OLLAMA_MAX_LOADED_MODELS environment variable.
+	MaxRunners = Uint("OLLAMA_MAX_LOADED_MODELS", 0)
+	// MaxQueue sets the maximum number of queued requests. MaxQueue can be configured via the OLLAMA_MAX_QUEUE environment variable.
+	MaxQueue = Uint("OLLAMA_MAX_QUEUE", 512)
+)
+
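+// Uint64 is like Uint but returns the parsed value as a uint64.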
+func Uint64(key string, defaultValue uint64) func() uint64 {
+	return func() uint64 {
+		if s := Var(key); s != "" {
+			if n, err := strconv.ParseUint(s, 10, 64); err != nil {
+				slog.Warn("invalid environment variable, using default", "key", key, "value", s, "default", defaultValue)
+			} else {
+				return n
+			}
+		}
+
+		return defaultValue
+	}
+}
+
+// Set aside VRAM per GPU
+var GpuOverhead = Uint64("OLLAMA_GPU_OVERHEAD", 0)
+
+type EnvVar struct {
+	Name        string
+	Value       any
+	Description string
+}
+
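+// AsMap returns the known configuration environment variables with their current values and descriptions, filtered for the current platform.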
+func AsMap() map[string]EnvVar {
+	ret := map[string]EnvVar{
+		"OLLAMA_DEBUG":             {"OLLAMA_DEBUG", LogLevel(), "Show additional debug information (e.g. OLLAMA_DEBUG=1)"},
+		"OLLAMA_FLASH_ATTENTION":   {"OLLAMA_FLASH_ATTENTION", FlashAttention(), "Enabled flash attention"},
+		"OLLAMA_KV_CACHE_TYPE":     {"OLLAMA_KV_CACHE_TYPE", KvCacheType(), "Quantization type for the K/V cache (default: f16)"},
+		"OLLAMA_GPU_OVERHEAD":      {"OLLAMA_GPU_OVERHEAD", GpuOverhead(), "Reserve a portion of VRAM per GPU (bytes)"},
+		"OLLAMA_HOST":              {"OLLAMA_HOST", Host(), "IP Address for the ollama server (default 127.0.0.1:11434)"},
+		"OLLAMA_KEEP_ALIVE":        {"OLLAMA_KEEP_ALIVE", KeepAlive(), "The duration that models stay loaded in memory (default \"5m\")"},
+		"OLLAMA_LLM_LIBRARY":       {"OLLAMA_LLM_LIBRARY", LLMLibrary(), "Set LLM library to bypass autodetection"},
+		"OLLAMA_LOAD_TIMEOUT":      {"OLLAMA_LOAD_TIMEOUT", LoadTimeout(), "How long to allow model loads to stall before giving up (default \"5m\")"},
+		"OLLAMA_MAX_LOADED_MODELS": {"OLLAMA_MAX_LOADED_MODELS", MaxRunners(), "Maximum number of loaded models per GPU"},
+		"OLLAMA_MAX_QUEUE":         {"OLLAMA_MAX_QUEUE", MaxQueue(), "Maximum number of queued requests"},
+		"OLLAMA_MODELS":            {"OLLAMA_MODELS", Models(), "The path to the models directory"},
+		"OLLAMA_NOHISTORY":         {"OLLAMA_NOHISTORY", NoHistory(), "Do not preserve readline history"},
+		"OLLAMA_NOPRUNE":           {"OLLAMA_NOPRUNE", NoPrune(), "Do not prune model blobs on startup"},
+		"OLLAMA_NUM_PARALLEL":      {"OLLAMA_NUM_PARALLEL", NumParallel(), "Maximum number of parallel requests"},
+		"OLLAMA_ORIGINS":           {"OLLAMA_ORIGINS", AllowedOrigins(), "A comma separated list of allowed origins"},
+		"OLLAMA_SCHED_SPREAD":      {"OLLAMA_SCHED_SPREAD", SchedSpread(), "Always schedule model across all GPUs"},
+		"OLLAMA_MULTIUSER_CACHE":   {"OLLAMA_MULTIUSER_CACHE", MultiUserCache(), "Optimize prompt caching for multi-user scenarios"},
+		"OLLAMA_CONTEXT_LENGTH":    {"OLLAMA_CONTEXT_LENGTH", ContextLength(), "Context length to use unless otherwise specified (default: 4096)"},
+		"OLLAMA_NEW_ENGINE":        {"OLLAMA_NEW_ENGINE", NewEngine(), "Enable the new Ollama engine"},
+
+		// Informational
+		"HTTP_PROXY":  {"HTTP_PROXY", String("HTTP_PROXY")(), "HTTP proxy"},
+		"HTTPS_PROXY": {"HTTPS_PROXY", String("HTTPS_PROXY")(), "HTTPS proxy"},
+		"NO_PROXY":    {"NO_PROXY", String("NO_PROXY")(), "No proxy"},
+	}
+
+	if runtime.GOOS != "windows" {
+		// Windows environment variables are case-insensitive so there's no need to duplicate them
+		ret["http_proxy"] = EnvVar{"http_proxy", String("http_proxy")(), "HTTP proxy"}
+		ret["https_proxy"] = EnvVar{"https_proxy", String("https_proxy")(), "HTTPS proxy"}
+		ret["no_proxy"] = EnvVar{"no_proxy", String("no_proxy")(), "No proxy"}
+	}
+
+	if runtime.GOOS != "darwin" {
+		ret["CUDA_VISIBLE_DEVICES"] = EnvVar{"CUDA_VISIBLE_DEVICES", CudaVisibleDevices(), "Set which NVIDIA devices are visible"}
+		ret["HIP_VISIBLE_DEVICES"] = EnvVar{"HIP_VISIBLE_DEVICES", HipVisibleDevices(), "Set which AMD devices are visible by numeric ID"}
+		ret["ROCR_VISIBLE_DEVICES"] = EnvVar{"ROCR_VISIBLE_DEVICES", RocrVisibleDevices(), "Set which AMD devices are visible by UUID or numeric ID"}
+		ret["GPU_DEVICE_ORDINAL"] = EnvVar{"GPU_DEVICE_ORDINAL", GpuDeviceOrdinal(), "Set which AMD devices are visible by numeric ID"}
+		ret["HSA_OVERRIDE_GFX_VERSION"] = EnvVar{"HSA_OVERRIDE_GFX_VERSION", HsaOverrideGfxVersion(), "Override the gfx used for all detected AMD GPUs"}
+		ret["OLLAMA_INTEL_GPU"] = EnvVar{"OLLAMA_INTEL_GPU", IntelGPU(), "Enable experimental Intel GPU detection"}
+	}
+
+	return ret
+}
+
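+// Values returns the current configuration as a map of environment variable name to formatted value.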
+func Values() map[string]string {
+	vals := make(map[string]string)
+	for k, v := range AsMap() {
+		vals[k] = fmt.Sprintf("%v", v.Value)
+	}
+	return vals
+}
+
+// Var returns an environment variable stripped of leading and trailing quotes or spaces
+func Var(key string) string {
+	return strings.Trim(strings.TrimSpace(os.Getenv(key)), "\"'")
+}
diff --git a/envconfig/config_test.go b/envconfig/config_test.go
new file mode 100644
index 0000000..f232f1c
--- /dev/null
+++ b/envconfig/config_test.go
@@ -0,0 +1,327 @@
+package envconfig
+
+import (
+	"log/slog"
+	"math"
+	"testing"
+	"time"
+
+	"github.com/google/go-cmp/cmp"
+	"github.com/ollama/ollama/logutil"
+)
+
+func TestHost(t *testing.T) {
+	cases := map[string]struct {
+		value  string
+		expect string
+	}{
+		"empty":               {"", "http://127.0.0.1:11434"},
+		"only address":        {"1.2.3.4", "http://1.2.3.4:11434"},
+		"only port":           {":1234", "http://:1234"},
+		"address and port":    {"1.2.3.4:1234", "http://1.2.3.4:1234"},
+		"hostname":            {"example.com", "http://example.com:11434"},
+		"hostname and port":   {"example.com:1234", "http://example.com:1234"},
+		"zero port":           {":0", "http://:0"},
+		"too large port":      {":66000", "http://:11434"},
+		"too small port":      {":-1", "http://:11434"},
+		"ipv6 localhost":      {"[::1]", "http://[::1]:11434"},
+		"ipv6 world open":     {"[::]", "http://[::]:11434"},
+		"ipv6 no brackets":    {"::1", "http://[::1]:11434"},
+		"ipv6 + port":         {"[::1]:1337", "http://[::1]:1337"},
+		"extra space":         {" 1.2.3.4 ", "http://1.2.3.4:11434"},
+		"extra quotes":        {"\"1.2.3.4\"", "http://1.2.3.4:11434"},
+		"extra space+quotes":  {" \" 1.2.3.4 \" ", "http://1.2.3.4:11434"},
+		"extra single quotes": {"'1.2.3.4'", "http://1.2.3.4:11434"},
+		"http":                {"http://1.2.3.4", "http://1.2.3.4:80"},
+		"http port":           {"http://1.2.3.4:4321", "http://1.2.3.4:4321"},
+		"https":               {"https://1.2.3.4", "https://1.2.3.4:443"},
+		"https port":          {"https://1.2.3.4:4321", "https://1.2.3.4:4321"},
+		"proxy path":          {"https://example.com/ollama", "https://example.com:443/ollama"},
+	}
+
+	for name, tt := range cases {
+		t.Run(name, func(t *testing.T) {
+			t.Setenv("OLLAMA_HOST", tt.value)
+			if host := Host(); host.String() != tt.expect {
+				t.Errorf("%s: expected %s, got %s", name, tt.expect, host.String())
+			}
+		})
+	}
+}
+
+func TestOrigins(t *testing.T) {
+	cases := []struct {
+		value  string
+		expect []string
+	}{
+		{"", []string{
+			"http://localhost",
+			"https://localhost",
+			"http://localhost:*",
+			"https://localhost:*",
+			"http://127.0.0.1",
+			"https://127.0.0.1",
+			"http://127.0.0.1:*",
+			"https://127.0.0.1:*",
+			"http://0.0.0.0",
+			"https://0.0.0.0",
+			"http://0.0.0.0:*",
+			"https://0.0.0.0:*",
+			"app://*",
+			"file://*",
+			"tauri://*",
+			"vscode-webview://*",
+			"vscode-file://*",
+		}},
+		{"http://10.0.0.1", []string{
+			"http://10.0.0.1",
+			"http://localhost",
+			"https://localhost",
+			"http://localhost:*",
+			"https://localhost:*",
+			"http://127.0.0.1",
+			"https://127.0.0.1",
+			"http://127.0.0.1:*",
+			"https://127.0.0.1:*",
+			"http://0.0.0.0",
+			"https://0.0.0.0",
+			"http://0.0.0.0:*",
+			"https://0.0.0.0:*",
+			"app://*",
+			"file://*",
+			"tauri://*",
+			"vscode-webview://*",
+			"vscode-file://*",
+		}},
+		{"http://172.16.0.1,https://192.168.0.1", []string{
+			"http://172.16.0.1",
+			"https://192.168.0.1",
+			"http://localhost",
+			"https://localhost",
+			"http://localhost:*",
+			"https://localhost:*",
+			"http://127.0.0.1",
+			"https://127.0.0.1",
+			"http://127.0.0.1:*",
+			"https://127.0.0.1:*",
+			"http://0.0.0.0",
+			"https://0.0.0.0",
+			"http://0.0.0.0:*",
+			"https://0.0.0.0:*",
+			"app://*",
+			"file://*",
+			"tauri://*",
+			"vscode-webview://*",
+			"vscode-file://*",
+		}},
+		{"http://totally.safe,http://definitely.legit", []string{
+			"http://totally.safe",
+			"http://definitely.legit",
+			"http://localhost",
+			"https://localhost",
+			"http://localhost:*",
+			"https://localhost:*",
+			"http://127.0.0.1",
+			"https://127.0.0.1",
+			"http://127.0.0.1:*",
+			"https://127.0.0.1:*",
+			"http://0.0.0.0",
+			"https://0.0.0.0",
+			"http://0.0.0.0:*",
+			"https://0.0.0.0:*",
+			"app://*",
+			"file://*",
+			"tauri://*",
+			"vscode-webview://*",
+			"vscode-file://*",
+		}},
+	}
+	for _, tt := range cases {
+		t.Run(tt.value, func(t *testing.T) {
+			t.Setenv("OLLAMA_ORIGINS", tt.value)
+
+			if diff := cmp.Diff(AllowedOrigins(), tt.expect); diff != "" {
+				t.Errorf("%s: mismatch (-want +got):\n%s", tt.value, diff)
+			}
+		})
+	}
+}
+
+func TestBool(t *testing.T) {
+	cases := map[string]bool{
+		"":      false,
+		"true":  true,
+		"false": false,
+		"1":     true,
+		"0":     false,
+		// invalid values
+		"random":    true,
+		"something": true,
+	}
+
+	for k, v := range cases {
+		t.Run(k, func(t *testing.T) {
+			t.Setenv("OLLAMA_BOOL", k)
+			if b := Bool("OLLAMA_BOOL")(); b != v {
+				t.Errorf("%s: expected %t, got %t", k, v, b)
+			}
+		})
+	}
+}
+
+func TestUint(t *testing.T) {
+	cases := map[string]uint{
+		"0":    0,
+		"1":    1,
+		"1337": 1337,
+		// default values
+		"":       11434,
+		"-1":     11434,
+		"0o10":   11434,
+		"0x10":   11434,
+		"string": 11434,
+	}
+
+	for k, v := range cases {
+		t.Run(k, func(t *testing.T) {
+			t.Setenv("OLLAMA_UINT", k)
+			if i := Uint("OLLAMA_UINT", 11434)(); i != v {
+				t.Errorf("%s: expected %d, got %d", k, v, i)
+			}
+		})
+	}
+}
+
+func TestKeepAlive(t *testing.T) {
+	cases := map[string]time.Duration{
+		"":       5 * time.Minute,
+		"1s":     time.Second,
+		"1m":     time.Minute,
+		"1h":     time.Hour,
+		"5m0s":   5 * time.Minute,
+		"1h2m3s": 1*time.Hour + 2*time.Minute + 3*time.Second,
+		"0":      time.Duration(0),
+		"60":     60 * time.Second,
+		"120":    2 * time.Minute,
+		"3600":   time.Hour,
+		"-0":     time.Duration(0),
+		"-1":     time.Duration(math.MaxInt64),
+		"-1m":    time.Duration(math.MaxInt64),
+		// invalid values
+		" ":   5 * time.Minute,
+		"???": 5 * time.Minute,
+		"1d":  5 * time.Minute,
+		"1y":  5 * time.Minute,
+		"1w":  5 * time.Minute,
+	}
+
+	for tt, expect := range cases {
+		t.Run(tt, func(t *testing.T) {
+			t.Setenv("OLLAMA_KEEP_ALIVE", tt)
+			if actual := KeepAlive(); actual != expect {
+				t.Errorf("%s: expected %s, got %s", tt, expect, actual)
+			}
+		})
+	}
+}
+
+func TestLoadTimeout(t *testing.T) {
+	defaultTimeout := 5 * time.Minute
+	cases := map[string]time.Duration{
+		"":       defaultTimeout,
+		"1s":     time.Second,
+		"1m":     time.Minute,
+		"1h":     time.Hour,
+		"5m0s":   defaultTimeout,
+		"1h2m3s": 1*time.Hour + 2*time.Minute + 3*time.Second,
+		"0":      time.Duration(math.MaxInt64),
+		"60":     60 * time.Second,
+		"120":    2 * time.Minute,
+		"3600":   time.Hour,
+		"-0":     time.Duration(math.MaxInt64),
+		"-1":     time.Duration(math.MaxInt64),
+		"-1m":    time.Duration(math.MaxInt64),
+		// invalid values
+		" ":   defaultTimeout,
+		"???": defaultTimeout,
+		"1d":  defaultTimeout,
+		"1y":  defaultTimeout,
+		"1w":  defaultTimeout,
+	}
+
+	for tt, expect := range cases {
+		t.Run(tt, func(t *testing.T) {
+			t.Setenv("OLLAMA_LOAD_TIMEOUT", tt)
+			if actual := LoadTimeout(); actual != expect {
+				t.Errorf("%s: expected %s, got %s", tt, expect, actual)
+			}
+		})
+	}
+}
+
+func TestVar(t *testing.T) {
+	cases := map[string]string{
+		"value":       "value",
+		" value ":     "value",
+		" 'value' ":   "value",
+		` "value" `:   "value",
+		" ' value ' ": " value ",
+		` " value " `: " value ",
+	}
+
+	for k, v := range cases {
+		t.Run(k, func(t *testing.T) {
+			t.Setenv("OLLAMA_VAR", k)
+			if s := Var("OLLAMA_VAR"); s != v {
+				t.Errorf("%s: expected %q, got %q", k, v, s)
+			}
+		})
+	}
+}
+
+func TestContextLength(t *testing.T) {
+	cases := map[string]uint{
+		"":     4096,
+		"2048": 2048,
+	}
+
+	for k, v := range cases {
+		t.Run(k, func(t *testing.T) {
+			t.Setenv("OLLAMA_CONTEXT_LENGTH", k)
+			if i := ContextLength(); i != v {
+				t.Errorf("%s: expected %d, got %d", k, v, i)
+			}
+		})
+	}
+}
+
+func TestLogLevel(t *testing.T) {
+	cases := map[string]slog.Level{
+		// Default to INFO
+		"":      slog.LevelInfo,
+		"false": slog.LevelInfo,
+		"f":     slog.LevelInfo,
+		"0":     slog.LevelInfo,
+
+		// True values enable Debug
+		"true": slog.LevelDebug,
+		"t":    slog.LevelDebug,
+
+		// Positive values increase verbosity
+		"1": slog.LevelDebug,
+		"2": logutil.LevelTrace,
+
+		// Negative values decrease verbosity
+		"-1": slog.LevelWarn,
+		"-2": slog.LevelError,
+	}
+
+	for k, v := range cases {
+		t.Run(k, func(t *testing.T) {
+			t.Setenv("OLLAMA_DEBUG", k)
+			if i := LogLevel(); i != v {
+				t.Errorf("%s: expected %d, got %d", k, v, i)
+			}
+		})
+	}
+}
diff --git a/format/bytes.go b/format/bytes.go
new file mode 100644
index 0000000..a24231d
--- /dev/null
+++ b/format/bytes.go
@@ -0,0 +1,63 @@
+package format
+
+import (
+	"fmt"
+	"math"
+)
+
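+// Decimal (SI) units are used by HumanBytes; binary (IEC) units are used by HumanBytes2.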
+const (
+	Byte = 1
+
+	KiloByte = Byte * 1000
+	MegaByte = KiloByte * 1000
+	GigaByte = MegaByte * 1000
+	TeraByte = GigaByte * 1000
+
+	KibiByte = Byte * 1024
+	MebiByte = KibiByte * 1024
+	GibiByte = MebiByte * 1024
+)
+
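+// HumanBytes formats b using decimal units, e.g. 1500000 becomes "1.5 MB".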
+func HumanBytes(b int64) string {
+	var value float64
+	var unit string
+
+	switch {
+	case b >= TeraByte:
+		value = float64(b) / TeraByte
+		unit = "TB"
+	case b >= GigaByte:
+		value = float64(b) / GigaByte
+		unit = "GB"
+	case b >= MegaByte:
+		value = float64(b) / MegaByte
+		unit = "MB"
+	case b >= KiloByte:
+		value = float64(b) / KiloByte
+		unit = "KB"
+	default:
+		return fmt.Sprintf("%d B", b)
+	}
+
+	switch {
+	case value >= 10:
+		return fmt.Sprintf("%d %s", int(value), unit)
+	case value != math.Trunc(value):
+		return fmt.Sprintf("%.1f %s", value, unit)
+	default:
+		return fmt.Sprintf("%d %s", int(value), unit)
+	}
+}
+
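+// HumanBytes2 formats b using binary units, e.g. 1536 becomes "1.5 KiB".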
+func HumanBytes2(b uint64) string {
+	switch {
+	case b >= GibiByte:
+		return fmt.Sprintf("%.1f GiB", float64(b)/GibiByte)
+	case b >= MebiByte:
+		return fmt.Sprintf("%.1f MiB", float64(b)/MebiByte)
+	case b >= KibiByte:
+		return fmt.Sprintf("%.1f KiB", float64(b)/KibiByte)
+	default:
+		return fmt.Sprintf("%d B", b)
+	}
+}
diff --git a/format/bytes_test.go b/format/bytes_test.go
new file mode 100644
index 0000000..5881af4
--- /dev/null
+++ b/format/bytes_test.go
@@ -0,0 +1,91 @@
+package format
+
+import (
+	"testing"
+)
+
+func TestHumanBytes(t *testing.T) {
+	type testCase struct {
+		input    int64
+		expected string
+	}
+
+	tests := []testCase{
+		// Test bytes (B)
+		{0, "0 B"},
+		{1, "1 B"},
+		{999, "999 B"},
+
+		// Test kilobytes (KB)
+		{1000, "1 KB"},
+		{1500, "1.5 KB"},
+		{999999, "999 KB"},
+
+		// Test megabytes (MB)
+		{1000000, "1 MB"},
+		{1500000, "1.5 MB"},
+		{999999999, "999 MB"},
+
+		// Test gigabytes (GB)
+		{1000000000, "1 GB"},
+		{1500000000, "1.5 GB"},
+		{999999999999, "999 GB"},
+
+		// Test terabytes (TB)
+		{1000000000000, "1 TB"},
+		{1500000000000, "1.5 TB"},
+		{1999999999999, "2.0 TB"},
+
+		// Test fractional values
+		{1234, "1.2 KB"},
+		{1234567, "1.2 MB"},
+		{1234567890, "1.2 GB"},
+	}
+
+	for _, tc := range tests {
+		t.Run(tc.expected, func(t *testing.T) {
+			result := HumanBytes(tc.input)
+			if result != tc.expected {
+				t.Errorf("Expected %s, got %s", tc.expected, result)
+			}
+		})
+	}
+}
+
+func TestHumanBytes2(t *testing.T) {
+	type testCase struct {
+		input    uint64
+		expected string
+	}
+
+	tests := []testCase{
+		// Test bytes (B)
+		{0, "0 B"},
+		{1, "1 B"},
+		{1023, "1023 B"},
+
+		// Test kibibytes (KiB)
+		{1024, "1.0 KiB"},
+		{1536, "1.5 KiB"},
+		{1048575, "1024.0 KiB"},
+
+		// Test mebibytes (MiB)
+		{1048576, "1.0 MiB"},
+		{1572864, "1.5 MiB"},
+		{1073741823, "1024.0 MiB"},
+
+		// Test gibibytes (GiB)
+		{1073741824, "1.0 GiB"},
+		{1610612736, "1.5 GiB"},
+		{2147483648, "2.0 GiB"},
+	}
+
+	for _, tc := range tests {
+		t.Run(tc.expected, func(t *testing.T) {
+			result := HumanBytes2(tc.input)
+			if result != tc.expected {
+				t.Errorf("Expected %s, got %s", tc.expected, result)
+			}
+		})
+	}
+}
diff --git a/format/format.go b/format/format.go
new file mode 100644
index 0000000..ac50570
--- /dev/null
+++ b/format/format.go
@@ -0,0 +1,34 @@
+package format
+
+import (
+	"fmt"
+	"math"
+	"strconv"
+)
+
+const (
+	Thousand = 1000
+	Million  = Thousand * 1000
+	Billion  = Million * 1000
+)
+
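+// HumanNumber formats b as a compact count, e.g. 1000000 becomes "1M" and 2800000000 becomes "2.8B".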
+func HumanNumber(b uint64) string {
+	switch {
+	case b >= Billion:
+		number := float64(b) / Billion
+		if number == math.Floor(number) {
+			return fmt.Sprintf("%.0fB", number) // no decimals if whole number
+		}
+		return fmt.Sprintf("%.1fB", number) // one decimal if not a whole number
+	case b >= Million:
+		number := float64(b) / Million
+		if number == math.Floor(number) {
+			return fmt.Sprintf("%.0fM", number) // no decimals if whole number
+		}
+		return fmt.Sprintf("%.2fM", number) // two decimals if not a whole number
+	case b >= Thousand:
+		return fmt.Sprintf("%.0fK", float64(b)/Thousand)
+	default:
+		return strconv.FormatUint(b, 10)
+	}
+}
diff --git a/format/format_test.go b/format/format_test.go
new file mode 100644
index 0000000..f6aff2d
--- /dev/null
+++ b/format/format_test.go
@@ -0,0 +1,36 @@
+package format
+
+import (
+	"testing"
+)
+
+func TestHumanNumber(t *testing.T) {
+	type testCase struct {
+		input    uint64
+		expected string
+	}
+
+	testCases := []testCase{
+		{0, "0"},
+		{999, "999"},
+		{1000, "1K"},
+		{1001, "1K"},
+		{1000000, "1M"},
+		{125000000, "125M"},
+		{500500000, "500.50M"},
+		{500550000, "500.55M"},
+		{1000000000, "1B"},
+		{2800000000, "2.8B"},
+		{2850000000, "2.9B"},
+		{1000000000000, "1000B"},
+	}
+
+	for _, tc := range testCases {
+		t.Run(tc.expected, func(t *testing.T) {
+			result := HumanNumber(tc.input)
+			if result != tc.expected {
+				t.Errorf("Expected %s, got %s", tc.expected, result)
+			}
+		})
+	}
+}
diff --git a/format/time.go b/format/time.go
new file mode 100644
index 0000000..7406284
--- /dev/null
+++ b/format/time.go
@@ -0,0 +1,70 @@
+package format
+
+import (
+	"fmt"
+	"math"
+	"strings"
+	"time"
+)
+
+// humanDuration returns a human-readable approximation of a
+// duration (eg. "About a minute", "4 hours ago", etc.).
+func humanDuration(d time.Duration) string {
+	seconds := int(d.Seconds())
+
+	switch {
+	case seconds < 1:
+		return "Less than a second"
+	case seconds == 1:
+		return "1 second"
+	case seconds < 60:
+		return fmt.Sprintf("%d seconds", seconds)
+	}
+
+	minutes := int(d.Minutes())
+	switch {
+	case minutes == 1:
+		return "About a minute"
+	case minutes < 60:
+		return fmt.Sprintf("%d minutes", minutes)
+	}
+
+	hours := int(math.Round(d.Hours()))
+	switch {
+	case hours == 1:
+		return "About an hour"
+	case hours < 48:
+		return fmt.Sprintf("%d hours", hours)
+	case hours < 24*7*2:
+		return fmt.Sprintf("%d days", hours/24)
+	case hours < 24*30*2:
+		return fmt.Sprintf("%d weeks", hours/24/7)
+	case hours < 24*365*2:
+		return fmt.Sprintf("%d months", hours/24/30)
+	}
+
+	return fmt.Sprintf("%d years", int(d.Hours())/24/365)
+}
+
+func HumanTime(t time.Time, zeroValue string) string {
+	return humanTime(t, zeroValue)
+}
+
+func HumanTimeLower(t time.Time, zeroValue string) string {
+	return strings.ToLower(humanTime(t, zeroValue))
+}
+
+func humanTime(t time.Time, zeroValue string) string {
+	if t.IsZero() {
+		return zeroValue
+	}
+
+	delta := time.Since(t)
+	if int(delta.Hours())/24/365 < -20 {
+		return "Forever"
+	} else if delta < 0 {
+		return humanDuration(-delta) + " from now"
+	}
+
+	return humanDuration(delta) + " ago"
+}
diff --git a/format/time_test.go b/format/time_test.go
new file mode 100644
index 0000000..d0f8934
--- /dev/null
+++ b/format/time_test.go
@@ -0,0 +1,45 @@
+package format
+
+import (
+	"testing"
+	"time"
+)
+
+func assertEqual(t *testing.T, a any, b any) {
+	if a != b {
+		t.Errorf("Assert failed, expected %v, got %v", b, a)
+	}
+}
+
+func TestHumanTime(t *testing.T) {
+	now := time.Now()
+
+	t.Run("zero value", func(t *testing.T) {
+		assertEqual(t, HumanTime(time.Time{}, "never"), "never")
+	})
+
+	t.Run("time in the future", func(t *testing.T) {
+		v := now.Add(48 * time.Hour)
+		assertEqual(t, HumanTime(v, ""), "2 days from now")
+	})
+
+	t.Run("time in the past", func(t *testing.T) {
+		v := now.Add(-48 * time.Hour)
+		assertEqual(t, HumanTime(v, ""), "2 days ago")
+	})
+
+	t.Run("soon", func(t *testing.T) {
+		v := now.Add(800 * time.Millisecond)
+		assertEqual(t, HumanTime(v, ""), "Less than a second from now")
+	})
+
+	t.Run("time way in the future", func(t *testing.T) {
+		v := now.Add(24 * time.Hour * 365 * 200)
+		assertEqual(t, HumanTime(v, ""), "Forever")
+	})
+
+	t.Run("time way in the future lowercase", func(t *testing.T) {
+		v := now.Add(24 * time.Hour * 365 * 200)
+		assertEqual(t, HumanTimeLower(v, ""), "forever")
+	})
+}
diff --git a/fs/config.go b/fs/config.go
new file mode 100644
index 0000000..89a1b13
--- /dev/null
+++ b/fs/config.go
@@ -0,0 +1,13 @@
+package fs
+
+type Config interface {
+	Architecture() string
+	String(string, ...string) string
+	Uint(string, ...uint32) uint32
+	Float(string, ...float32) float32
+	Bool(string, ...bool) bool
+
+	Strings(string, ...[]string) []string
+	Ints(string, ...[]int32) []int32
+	Floats(string, ...[]float32) []float32
+}
diff --git a/fs/ggml/ggml.go b/fs/ggml/ggml.go
new file mode 100644
index 0000000..aa85aec
--- /dev/null
+++ b/fs/ggml/ggml.go
@@ -0,0 +1,705 @@
+package ggml
+
+import (
+	"encoding/binary"
+	"errors"
+	"fmt"
+	"io"
+	"log/slog"
+	"slices"
+	"strings"
+
+	"github.com/ollama/ollama/fs/util/bufioutil"
+)
+
+type GGML struct {
+	container
+	model
+	Length int64
+}
+
+type model interface {
+	KV() KV
+	Tensors() Tensors
+}
+
+type KV map[string]any
+
+func (kv KV) Architecture() string {
+	return kv.String("general.architecture", "unknown")
+}
+
+func (kv KV) Kind() string {
+	return kv.String("general.type", "unknown")
+}
+
+func (kv KV) ParameterCount() uint64 {
+	return keyValue(kv, "general.parameter_count", uint64(0))
+}
+
+func (kv KV) FileType() FileType {
+	if t := kv.Uint("general.file_type"); t > 0 {
+		return FileType(t)
+	}
+
+	return FileTypeUnknown
+}
+
+func (kv KV) BlockCount() uint64 {
+	return uint64(kv.Uint("block_count"))
+}
+
+func (kv KV) EmbeddingLength() uint64 {
+	return uint64(kv.Uint("embedding_length"))
+}
+
+func (kv KV) HeadCount() uint64 {
+	return uint64(kv.Uint("attention.head_count"))
+}
+
+func (kv KV) HeadCountKV() uint64 {
+	return uint64(kv.Uint("attention.head_count_kv", 1))
+}
+
+func (kv KV) EmbeddingHeadCount() uint64 {
+	if heads := kv.HeadCount(); heads > 0 {
+		return kv.EmbeddingLength() / heads
+	}
+
+	return 0
+}
+
+func (kv KV) EmbeddingHeadCountK() uint64 {
+	return uint64(kv.Uint("attention.key_length", uint32(kv.EmbeddingHeadCount())))
+}
+
+func (kv KV) EmbeddingHeadCountV() uint64 {
+	return uint64(kv.Uint("attention.value_length", uint32(kv.EmbeddingHeadCount())))
+}
+
+func (kv KV) GQA() uint64 {
+	return kv.HeadCount() / kv.HeadCountKV()
+}
+
+func (kv KV) ContextLength() uint64 {
+	return uint64(kv.Uint("context_length"))
+}
+
+func (kv KV) ChatTemplate() string {
+	return kv.String("tokenizer.chat_template")
+}
+
+func (kv KV) String(key string, defaultValue ...string) string {
+	return keyValue(kv, key, append(defaultValue, "")...)
+}
+
+func (kv KV) Uint(key string, defaultValue ...uint32) uint32 {
+	return keyValue(kv, key, append(defaultValue, 0)...)
+}
+
+func (kv KV) Float(key string, defaultValue ...float32) float32 {
+	return keyValue(kv, key, append(defaultValue, 0)...)
+}
+
+func (kv KV) Bool(key string, defaultValue ...bool) bool {
+	return keyValue(kv, key, append(defaultValue, false)...)
+}
+
+func (kv KV) Strings(key string, defaultValue ...[]string) []string {
+	return keyValue(kv, key, &array[string]{values: append(defaultValue, []string(nil))[0]}).values
+}
+
+func (kv KV) Ints(key string, defaultValue ...[]int32) []int32 {
+	return keyValue(kv, key, &array[int32]{values: append(defaultValue, []int32(nil))[0]}).values
+}
+
+func (kv KV) Uints(key string, defaultValue ...[]uint32) []uint32 {
+	return keyValue(kv, key, &array[uint32]{values: append(defaultValue, []uint32(nil))[0]}).values
+}
+
+func (kv KV) Floats(key string, defaultValue ...[]float32) []float32 {
+	return keyValue(kv, key, &array[float32]{values: append(defaultValue, []float32(nil))[0]}).values
+}
+
+func (kv KV) OllamaEngineRequired() bool {
+	return slices.Contains([]string{
+		"gemma3",
+		"mistral3",
+		"llama4",
+		"mllama",
+		"qwen25vl",
+	}, kv.Architecture())
+}
+
+type valueTypes interface {
+	uint8 | int8 | uint16 | int16 |
+		uint32 | int32 | uint64 | int64 |
+		string | float32 | float64 | bool
+}
+
+type arrayValueTypes interface {
+	*array[uint8] | *array[int8] | *array[uint16] | *array[int16] |
+		*array[uint32] | *array[int32] | *array[uint64] | *array[int64] |
+		*array[string] | *array[float32] | *array[float64] | *array[bool]
+}
+
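+// keyValue looks up key in kv, prefixing keys outside the tokenizer. and general. namespaces with the
+// model architecture, and returns the first default value when the key is missing.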
+func keyValue[T valueTypes | arrayValueTypes](kv KV, key string, defaultValue ...T) T {
+	if !strings.HasPrefix(key, "tokenizer.") && !strings.HasPrefix(key, "general.") {
+		key = kv.Architecture() + "." + key
+	}
+
+	if val, ok := kv[key]; ok {
+		return val.(T)
+	}
+
+	slog.Debug("key not found", "key", key, "default", defaultValue[0])
+	return defaultValue[0]
+}
+
+type Tensors struct {
+	items  []*Tensor
+	Offset uint64
+}
+
+func (s Tensors) Items(prefix ...string) []*Tensor {
+	if len(prefix) == 0 {
+		return s.items
+	}
+
+	var items []*Tensor
+	for _, t := range s.items {
+		if strings.HasPrefix(t.Name, prefix[0]) {
+			items = append(items, t)
+		}
+	}
+
+	return items
+}
+
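+// GroupLayers groups tensors by layer, keyed by name prefixes such as "blk.0" or "mm.0".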
+func (ts Tensors) GroupLayers() map[string]Layer {
+	layers := make(map[string]Layer)
+	for _, t := range ts.items {
+		parts := strings.Split(t.Name, ".")
+		if index := slices.IndexFunc(parts, func(s string) bool { return s == "blk" || s == "mm" }); index != -1 {
+			if len(parts) > index+2 {
+				// blk and mm should have a number after them, join it
+				parts = append(
+					[]string{strings.Join(parts[:index+2], ".")},
+					parts[index+2:]...)
+			}
+		}
+
+		if _, ok := layers[parts[0]]; !ok {
+			layers[parts[0]] = make(Layer)
+		}
+
+		layers[parts[0]][strings.Join(parts[1:], ".")] = t
+	}
+
+	return layers
+}
+
+type Layer map[string]*Tensor
+
+func (l Layer) Size() (size uint64) {
+	for _, t := range l {
+		size += t.Size()
+	}
+
+	return size
+}
+
+type Tensor struct {
+	Name   string `json:"name"`
+	Kind   uint32 `json:"kind"`
+	Offset uint64 `json:"-"`
+
+	// Shape is the number of elements in each dimension
+	Shape []uint64 `json:"shape"`
+
+	io.WriterTo `json:"-"`
+}
+
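+// block returns the layer number parsed from a tensor name of the form "blk.N....", or -1 if the name does not match.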
+func (t Tensor) block() (n int) {
+	if _, err := fmt.Sscanf(t.Name, "blk.%d.", &n); err != nil {
+		return -1
+	}
+
+	return
+}
+
+func (t Tensor) blockSize() uint64 {
+	return (TensorType)(t.Kind).BlockSize()
+}
+
+func (t TensorType) BlockSize() uint64 {
+	switch t {
+	case
+		0,  // F32
+		1,  // F16
+		24, // I8
+		25, // I16
+		26, // I32
+		27, // I64
+		28, // F64
+		30: // BF16
+		return 1
+	case
+		2,  // Q4_0
+		3,  // Q4_1
+		6,  // Q5_0
+		7,  // Q5_1
+		8,  // Q8_0
+		9,  // Q8_1
+		20: // IQ4_NL
+		return 32
+	default:
+		return 256
+	}
+}
+
+func (t Tensor) typeSize() uint64 {
+	return TensorType(t.Kind).TypeSize()
+}
+
+func (t TensorType) TypeSize() uint64 {
+	blockSize := t.BlockSize()
+
+	switch t {
+	case TensorTypeF32:
+		return 4
+	case TensorTypeF16:
+		return 2
+	case TensorTypeQ4_0:
+		return 2 + blockSize/2
+	case TensorTypeQ4_1:
+		return 2 + 2 + blockSize/2
+	case TensorTypeQ5_0:
+		return 2 + 4 + blockSize/2
+	case TensorTypeQ5_1:
+		return 2 + 2 + 4 + blockSize/2
+	case TensorTypeQ8_0:
+		return 2 + blockSize
+	case TensorTypeQ8_1:
+		return 2 + 2 + blockSize
+	case TensorTypeQ2_K:
+		return blockSize/16 + blockSize/4 + 2 + 2
+	case TensorTypeQ3_K:
+		return blockSize/8 + blockSize/4 + 12 + 2
+	case TensorTypeQ4_K:
+		return 2 + 2 + 12 + blockSize/2
+	case TensorTypeQ5_K:
+		return 2 + 2 + 12 + blockSize/8 + blockSize/2
+	case TensorTypeQ6_K:
+		return blockSize/2 + blockSize/4 + blockSize/16 + 2
+	case TensorTypeQ8_K:
+		return 4 + blockSize + 2*blockSize/16
+	case tensorTypeIQ2_XXS:
+		return 2 + 2*blockSize/8
+	case tensorTypeIQ2_XS:
+		return 2 + 2*blockSize/8 + blockSize/32
+	case tensorTypeIQ3_XXS:
+		return 2 + blockSize/4 + blockSize/8
+	case tensorTypeIQ1_S:
+		return 2 + blockSize/8 + blockSize/16
+	case tensorTypeIQ4_NL:
+		return 2 + blockSize/2
+	case tensorTypeIQ3_S:
+		return 2 + blockSize/4 + blockSize/8 + blockSize/32 + 4
+	case tensorTypeIQ2_S:
+		return 2 + blockSize/4 + blockSize/16
+	case tensorTypeIQ4_XS:
+		return 2 + 2 + blockSize/2 + blockSize/64
+	case TensorTypeI8:
+		return 1
+	case TensorTypeI16:
+		return 2
+	case TensorTypeI32:
+		return 4
+	case TensorTypeI64:
+		return 8
+	case TensorTypeF64:
+		return 8
+	case tensorTypeIQ1_M:
+		return blockSize/8 + blockSize/16 + blockSize/32
+	case TensorTypeBF16:
+		return 2
+	default:
+		return 0
+	}
+}
+
+func (t Tensor) Elements() uint64 {
+	var count uint64 = 1
+	for _, n := range t.Shape {
+		count *= n
+	}
+	return count
+}
+
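+// Size returns the tensor's size in bytes, scaling the element count by the
+// bytes-per-element ratio of its type (type size divided by block size).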
+func (t Tensor) Size() uint64 {
+	return t.Elements() * t.typeSize() / t.blockSize()
+}
+
+func (t Tensor) Type() string {
+	return TensorType(t.Kind).String()
+}
+
+type container interface {
+	Name() string
+	Decode(io.ReadSeeker) (model, error)
+}
+
+const (
+	// Magic constant for `ggml` files (unversioned).
+	FILE_MAGIC_GGML = 0x67676d6c
+	// Magic constant for `ggml` files (versioned, ggmf).
+	FILE_MAGIC_GGMF = 0x67676d66
+	// Magic constant for `ggml` files (versioned, ggjt).
+	FILE_MAGIC_GGJT = 0x67676a74
+	// Magic constant for `ggla` files (LoRA adapter).
+	FILE_MAGIC_GGLA = 0x67676C61
+	// Magic constants for `gguf` files (versioned, gguf), little- and big-endian.
+	FILE_MAGIC_GGUF_LE = 0x46554747
+	FILE_MAGIC_GGUF_BE = 0x47475546
+)
+
+var ErrUnsupportedFormat = errors.New("unsupported model format")
+
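+// DetectContentType reports the container format ("ggml", "ggmf", "ggjt",
+// "ggla", or "gguf") based on the magic in the first four bytes of b, or an
+// empty string if the magic is unknown.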
+func DetectContentType(b []byte) string {
+	switch binary.LittleEndian.Uint32(b[:4]) {
+	case FILE_MAGIC_GGML:
+		return "ggml"
+	case FILE_MAGIC_GGMF:
+		return "ggmf"
+	case FILE_MAGIC_GGJT:
+		return "ggjt"
+	case FILE_MAGIC_GGLA:
+		return "ggla"
+	case FILE_MAGIC_GGUF_LE, FILE_MAGIC_GGUF_BE:
+		return "gguf"
+	default:
+		return ""
+	}
+}
+
+// Decode decodes a GGML model from the given reader.
+//
+// It collects array values for arrays with a size less than or equal to
+// maxArraySize. If the maxArraySize is negative, all arrays are collected.
+func Decode(rs io.ReadSeeker, maxArraySize int) (*GGML, error) {
+	rs = bufioutil.NewBufferedSeeker(rs, 32<<10)
+
+	var magic uint32
+	if err := binary.Read(rs, binary.LittleEndian, &magic); err != nil {
+		return nil, err
+	}
+
+	var c container
+	switch magic {
+	case FILE_MAGIC_GGUF_LE:
+		c = &containerGGUF{ByteOrder: binary.LittleEndian, maxArraySize: maxArraySize}
+	case FILE_MAGIC_GGUF_BE:
+		c = &containerGGUF{ByteOrder: binary.BigEndian, maxArraySize: maxArraySize}
+	default:
+		return nil, errors.New("invalid file magic")
+	}
+
+	model, err := c.Decode(rs)
+	if err != nil {
+		return nil, err
+	}
+
+	offset, err := rs.Seek(0, io.SeekCurrent)
+	if err != nil {
+		return nil, err
+	}
+
+	// wrap the decoded model with its container; Length is the number of bytes read so far
+	return &GGML{
+		container: c,
+		model:     model,
+		Length:    offset,
+	}, nil
+}
+
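+// GraphSize estimates the per-layer KV cache sizes in bytes along with the
+// compute graph sizes needed for partial and full offload, based on the
+// model architecture, context and batch sizes, parallelism, and KV cache type.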
+func (f GGML) GraphSize(context, batch uint64, numParallel int, kvCacheType string) (kv []uint64, partialOffload, fullOffload uint64) {
+	embedding := f.KV().EmbeddingLength()
+	heads := f.KV().HeadCount()
+	headsKV := f.KV().HeadCountKV()
+	vocab := uint64(f.KV()["tokenizer.ggml.tokens"].(*array[string]).size)
+
+	embeddingHeads := f.KV().EmbeddingHeadCount()
+	embeddingHeadsK := f.KV().EmbeddingHeadCountK()
+	embeddingHeadsV := f.KV().EmbeddingHeadCountV()
+
+	layers := f.Tensors().GroupLayers()
+
+	bytesPerElement := kvCacheBytesPerElement(kvCacheType)
+	kv = make([]uint64, f.KV().BlockCount())
+	for i := range kv {
+		kv[i] = uint64(float64(context*(embeddingHeadsK+embeddingHeadsV)*headsKV) * bytesPerElement)
+	}
+
+	switch f.KV().Architecture() {
+	case "llama", "llama4":
+		fullOffload = max(
+			4*batch*(1+4*embedding+context*(1+heads)),
+			4*batch*(embedding+vocab),
+		)
+
+		partialOffload = 4 * batch * embedding
+		partialOffload += max(
+			4*batch*(1+embedding+max(context, embedding))+embedding*embedding*9/16+4*context*(batch*heads+embeddingHeads*headsKV),
+			4*batch*(embedding+vocab)+embedding*vocab*105/128,
+		)
+
+		if ffnGateExpsWeight, ok := layers["blk.0"]["ffn_gate_exps.weight"]; ok {
+			// mixtral 8x22b
+			ff := uint64(f.KV().Uint("feed_forward_length"))
+			partialOffload = max(
+				3*ffnGateExpsWeight.Size()+4*batch*(2*ff+headsKV+embedding+context+embeddingHeads*headsKV),
+				4*(context*batch*heads+context*embeddingHeads*headsKV+batch*1024+embeddingHeads*headsKV*batch),
+			)
+		} else if ffnGateWeight, ok := layers["blk.0"]["ffn_gate.0.weight"]; ok {
+			// mixtral 8x7b
+			ffnGateWeight1 := ffnGateWeight.Shape[1]
+			fullOffload = 4 * batch * (2 + 3*embedding + context*(1+heads) + 2*headsKV + ffnGateWeight1)
+			partialOffload = max(
+				4*batch*(3+embeddingHeads*headsKV+embedding+context*(1+heads)+ffnGateWeight1)+(embedding*embedding+3*embedding*headsKV*ffnGateWeight1)*9/16,
+				4*batch*(1+2*embedding+context*(1+heads))+embedding*(6*context*headsKV/heads+embedding*9/16),
+			)
+		}
+	case "mllama":
+		var visionTokens, tiles uint64 = 1601, 4
+
+		crossAttentionLayers := f.KV().Ints("attention.cross_attention_layers")
+		for i := range kv {
+			if slices.Contains(crossAttentionLayers, int32(i)) {
+				kv[i] = headsKV * (embeddingHeadsK + embeddingHeadsV) *
+					4 * // sizeof(float32)
+					visionTokens *
+					tiles
+			}
+		}
+
+		fullOffload = max(
+			4*batch*(2+3*embedding+embeddingHeadsK*heads+context*(1+heads)),
+			// vocab graph
+			4*batch*(embedding+vocab),
+		)
+
+		var ropeFreqsCount uint64
+		if ropeFreqs, ok := f.Tensors().GroupLayers()["rope_freqs"]; ok {
+			if ropeFreqsWeights, ok := ropeFreqs["weights"]; ok {
+				ropeFreqsCount = ropeFreqsWeights.Elements()
+			}
+		}
+
+		partialOffload = max(
+			4*(batch*
+				(2*embedding+1+context*(1+heads)+embeddingHeadsK*heads)+
+				ropeFreqsCount+
+				embeddingHeadsK*context*headsKV),
+			// vocab graph
+			4*batch*(embedding+vocab)+embedding*vocab*105/128,
+		)
+	case "gemma", "gemma2", "gemma3":
+		fullOffload = max(
+			4*batch*(embedding+vocab),
+			4*batch*(2+context+context*heads+2*embedding+2*embeddingHeadsK*heads),
+		)
+
+		partialOffload = max(
+			4*embedding*batch+embedding*vocab*105/128+4*vocab*batch,
+			4*batch*(2*embedding+1+2*embeddingHeadsK*heads+context+context*heads)+
+				4*embeddingHeadsK*context*8+
+				embedding*embeddingHeadsK*heads*9/16,
+		)
+
+		// Gemma2 also has sliding window attention but we only have an optimized implementation in the Ollama
+		// engine. Gemma3 always uses the Ollama engine.
+		if f.KV().Architecture() == "gemma3" {
+			const gemma3GlobalCacheCount = 6
+			slidingWindow := (uint64(numParallel) * uint64(f.KV().Uint("attention.sliding_window"))) + batch
+			for i := range kv {
+				// Every 6th layer is a global layer and keeps the full context size set above. The
+				// remaining layers are smaller local (sliding window) layers.
+				if (i+1)%gemma3GlobalCacheCount != 0 {
+					kv[i] = uint64(float64(slidingWindow*(embeddingHeadsK+embeddingHeadsV)*headsKV) * bytesPerElement)
+				}
+			}
+		}
+	case "command-r":
+		fullOffload = max(
+			4*batch*(embedding+vocab),
+			4*batch*(2+4*embedding+context*(1+heads)),
+		)
+
+		partialOffload = max(
+			4*batch*(embedding+vocab)+embedding*vocab*105/128,
+			4*batch*(1+2*embedding+context*(1+heads))+4*embedding*context+embedding*embedding*9/16,
+		)
+	case "qwen2":
+		fullOffload = max(
+			4*batch*(embedding+vocab),
+			4*batch*(1+2*embedding+context+context*heads),
+		)
+
+		partialOffload = max(
+			4*batch*(embedding+vocab)+embedding*vocab*105/128,
+			4*(batch*(1+2*embedding+context*(1+heads))+embedding*(1+context)),
+		)
+	case "phi2":
+		fullOffload = max(
+			4*batch*(embedding+vocab),
+			4*batch*(1+4*embedding+context+context*heads),
+		)
+
+		partialOffload = max(
+			4*batch*(2*embedding+vocab)+embedding*vocab*105/128,
+			4*batch*(2+3*embedding+context+context*heads),
+		)
+	case "stablelm":
+		fullOffload = 4 * batch * (context*(1+heads) + 3*embedding + 2)
+		partialOffload = max(
+			4*batch*(vocab+2*embedding),
+			fullOffload,
+		)
+	case "deepseek2":
+		fullOffload = max(
+			4*batch*(3*embedding+vocab),
+			4*batch*(3*embedding+2+context*(1+headsKV)+2*embeddingHeadsK*headsKV),
+		)
+
+		partialOffload = max(
+			4*batch*(3*embedding+vocab)+embedding*vocab*105/128,
+			4*batch*(2*embedding+1+2*embeddingHeadsK*headsKV+context+context*headsKV)+4*embeddingHeadsK*context*headsKV+embedding*embeddingHeadsK*headsKV*9/16,
+		)
+	case "chatglm":
+		fullOffload = 4 * batch * (embedding + vocab)
+		partialOffload = 4*batch*(embedding+vocab) + embedding*vocab*105/128
+		if qkvBias, ok := layers["blk.0"]["attn_qkv.bias"]; ok {
+			fullOffload = max(
+				fullOffload,
+				4*batch*(2+
+					2*embedding+
+					context+
+					context*heads+
+					embeddingHeadsK*heads+
+					qkvBias.Shape[0]),
+			)
+
+			partialOffload = max(
+				partialOffload,
+				4*batch*(1+
+					2*embedding+
+					embeddingHeadsK*heads+
+					context+
+					context*heads)+
+					4*embeddingHeadsK*context+
+					4*context*embeddingHeadsK+
+					4*qkvBias.Shape[0],
+			)
+		}
+	}
+
+	return
+}
+
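+// VisionGraphSize estimates the weight and compute graph sizes in bytes for
+// the vision component of a multimodal model. Both are zero when the model
+// has no vision block.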
+func (llm GGML) VisionGraphSize() (weights, graphSize uint64) {
+	if llm.KV().Uint("vision.block_count") == 0 {
+		return
+	}
+
+	for name, layer := range llm.Tensors().GroupLayers() {
+		if name == "v" || strings.HasPrefix(name, "v.") {
+			for _, tensor := range layer {
+				weights += tensor.Size()
+			}
+		}
+	}
+
+	imageSize := uint64(llm.KV().Uint("vision.image_size"))
+	patchSize := uint64(llm.KV().Uint("vision.patch_size"))
+	if patchSize == 0 {
+		slog.Warn("unknown patch size for vision model")
+		return
+	}
+
+	numChannels := uint64(llm.KV().Uint("vision.num_channels"))
+
+	numPatches := (imageSize / patchSize) * (imageSize / patchSize)
+	if _, ok := llm.Tensors().GroupLayers()["v"]["class_embd"]; ok {
+		numPatches++
+	}
+
+	headCount := uint64(llm.KV().Uint("vision.attention.head_count"))
+	embeddingLength := uint64(llm.KV().Uint("vision.embedding_length"))
+
+	switch llm.KV().Architecture() {
+	case "mllama":
+		numPaddedPatches := numPatches + 8 - (numPatches%8)%8
+
+		maxNumTiles := uint64(llm.KV().Uint("vision.max_num_tiles"))
+
+		graphSize = 4 * (8 +
+			imageSize*imageSize*numChannels*maxNumTiles +
+			embeddingLength*numPatches*maxNumTiles +
+			9*embeddingLength*numPaddedPatches*maxNumTiles +
+			numPaddedPatches*maxNumTiles*numPaddedPatches*maxNumTiles*headCount)
+	case "gemma3", "mistral3":
+		graphSize = 4 * (imageSize*imageSize*numChannels +
+			embeddingLength*patchSize +
+			numPatches*numPatches*headCount)
+	case "qwen25vl":
+		maxPixels := uint64(llm.KV().Uint("vision.max_pixels", 28*28*1280))
+
+		numPatches := maxPixels / (patchSize * patchSize)
+
+		graphSize = 4 * (maxPixels*numChannels + // Original image storage
+			// Normalized pixels
+			maxPixels*numChannels +
+			// Patches storage (numPatches * channels * patchSize^2)
+			numPatches*numChannels*patchSize*patchSize +
+			// Self-attention calculations
+			numPatches*numPatches*headCount +
+			// Additional buffer for processing
+			embeddingLength*numPatches)
+	case "llama4":
+		// vision graph is computed independently in the same schedule
+		// and is negligible compared to the worst case text graph
+	}
+
+	return weights, graphSize
+}
+
+// SupportsKVCacheType checks if the requested cache type is supported
+func (f GGML) SupportsKVCacheType(cacheType string) bool {
+	return slices.Contains([]string{"f16", "q8_0", "q4_0"}, cacheType)
+}
+
+// SupportsFlashAttention checks if the model supports flash attention
+func (f GGML) SupportsFlashAttention() bool {
+	_, isEmbedding := f.KV()[fmt.Sprintf("%s.pooling_type", f.KV().Architecture())]
+	if isEmbedding {
+		return false
+	}
+
+	// Check head counts match and are non-zero
+	headCountK := f.KV().EmbeddingHeadCountK()
+	headCountV := f.KV().EmbeddingHeadCountV()
+	return headCountK != 0 && headCountV != 0 && headCountK == headCountV
+}
+
+// kvCacheBytesPerElement returns the number of bytes per element for a given KV cache type
+func kvCacheBytesPerElement(cacheType string) float64 {
+	switch cacheType {
+	case "q8_0":
+		return 1 // 1/2 of fp16
+	case "q4_0":
+		return 0.5 // 1/4 of fp16
+	default:
+		return 2 // f16 (default)
+	}
+}
diff --git a/fs/ggml/ggml_test.go b/fs/ggml/ggml_test.go
new file mode 100644
index 0000000..c1c1b43
--- /dev/null
+++ b/fs/ggml/ggml_test.go
@@ -0,0 +1,271 @@
+package ggml
+
+import (
+	"maps"
+	"math"
+	"slices"
+	"strconv"
+	"strings"
+	"testing"
+
+	"github.com/google/go-cmp/cmp"
+)
+
+func TestTensorLayers(t *testing.T) {
+	tensors := make(map[string]*Tensor)
+	for _, name := range []string{
+		"token_embd.weight",
+		"blk.0.attn_k.weight",
+		"blk.0.attn_output.weight",
+		"blk.0.attn_q.weight",
+		"blk.0.attn_v.weight",
+		"blk.0.attn_norm.weight",
+		"blk.0.ffn_down.weight",
+		"blk.0.ffn_gate.weight",
+		"blk.0.ffn_up.weight",
+		"blk.0.ffn_norm.weight",
+		"output_norm.weight",
+		"mm.0.bias",
+		"mm.0.weight",
+		"v.blk.0.attn_k.weight",
+		"v.blk.0.attn_output.weight",
+		"v.blk.0.attn_q.weight",
+		"v.blk.0.attn_v.weight",
+		"v.blk.0.attn_norm.weight",
+		"v.blk.0.ffn_down.weight",
+		"v.blk.0.ffn_gate.weight",
+		"v.blk.0.ffn_up.weight",
+		"v.blk.0.ffn_norm.weight",
+		"v.patch_embd.weight",
+		"v.position_embd.gate",
+		"v.position_embd.weight",
+	} {
+		tensors[name] = &Tensor{Name: name}
+	}
+
+	cases := []struct {
+		name  string
+		items []*Tensor
+		want  map[string]Layer
+	}{
+		{
+			name: "text",
+			items: slices.Collect(func(yield func(*Tensor) bool) {
+				for k, v := range tensors {
+					if !strings.HasPrefix(k, "mm.") && !strings.HasPrefix(k, "v.") {
+						if !yield(v) {
+							return
+						}
+					}
+				}
+			}),
+			want: map[string]Layer{
+				"blk.0": {
+					"attn_k.weight":      tensors["blk.0.attn_k.weight"],
+					"attn_q.weight":      tensors["blk.0.attn_q.weight"],
+					"attn_v.weight":      tensors["blk.0.attn_v.weight"],
+					"attn_output.weight": tensors["blk.0.attn_output.weight"],
+					"attn_norm.weight":   tensors["blk.0.attn_norm.weight"],
+					"ffn_down.weight":    tensors["blk.0.ffn_down.weight"],
+					"ffn_gate.weight":    tensors["blk.0.ffn_gate.weight"],
+					"ffn_up.weight":      tensors["blk.0.ffn_up.weight"],
+					"ffn_norm.weight":    tensors["blk.0.ffn_norm.weight"],
+				},
+				"token_embd":  {"weight": tensors["token_embd.weight"]},
+				"output_norm": {"weight": tensors["output_norm.weight"]},
+			},
+		},
+		{
+			name: "vision",
+			items: slices.Collect(func(yield func(*Tensor) bool) {
+				for k, v := range tensors {
+					if strings.HasPrefix(k, "mm.") || strings.HasPrefix(k, "v.") {
+						if !yield(v) {
+							return
+						}
+					}
+				}
+			}),
+			want: map[string]Layer{
+				"mm.0": {
+					"bias":   tensors["mm.0.bias"],
+					"weight": tensors["mm.0.weight"],
+				},
+				"v.blk.0": {
+					"attn_k.weight":      tensors["v.blk.0.attn_k.weight"],
+					"attn_q.weight":      tensors["v.blk.0.attn_q.weight"],
+					"attn_v.weight":      tensors["v.blk.0.attn_v.weight"],
+					"attn_output.weight": tensors["v.blk.0.attn_output.weight"],
+					"attn_norm.weight":   tensors["v.blk.0.attn_norm.weight"],
+					"ffn_down.weight":    tensors["v.blk.0.ffn_down.weight"],
+					"ffn_gate.weight":    tensors["v.blk.0.ffn_gate.weight"],
+					"ffn_up.weight":      tensors["v.blk.0.ffn_up.weight"],
+					"ffn_norm.weight":    tensors["v.blk.0.ffn_norm.weight"],
+				},
+				"v": {
+					"patch_embd.weight":    tensors["v.patch_embd.weight"],
+					"position_embd.gate":   tensors["v.position_embd.gate"],
+					"position_embd.weight": tensors["v.position_embd.weight"],
+				},
+			},
+		},
+		{
+			name:  "vision and text",
+			items: slices.Collect(maps.Values(tensors)),
+			want: map[string]Layer{
+				"blk.0": {
+					"attn_k.weight":      tensors["blk.0.attn_k.weight"],
+					"attn_q.weight":      tensors["blk.0.attn_q.weight"],
+					"attn_v.weight":      tensors["blk.0.attn_v.weight"],
+					"attn_output.weight": tensors["blk.0.attn_output.weight"],
+					"attn_norm.weight":   tensors["blk.0.attn_norm.weight"],
+					"ffn_down.weight":    tensors["blk.0.ffn_down.weight"],
+					"ffn_gate.weight":    tensors["blk.0.ffn_gate.weight"],
+					"ffn_up.weight":      tensors["blk.0.ffn_up.weight"],
+					"ffn_norm.weight":    tensors["blk.0.ffn_norm.weight"],
+				},
+				"token_embd":  {"weight": tensors["token_embd.weight"]},
+				"output_norm": {"weight": tensors["output_norm.weight"]},
+				"mm.0": {
+					"bias":   tensors["mm.0.bias"],
+					"weight": tensors["mm.0.weight"],
+				},
+				"v.blk.0": {
+					"attn_k.weight":      tensors["v.blk.0.attn_k.weight"],
+					"attn_q.weight":      tensors["v.blk.0.attn_q.weight"],
+					"attn_v.weight":      tensors["v.blk.0.attn_v.weight"],
+					"attn_output.weight": tensors["v.blk.0.attn_output.weight"],
+					"attn_norm.weight":   tensors["v.blk.0.attn_norm.weight"],
+					"ffn_down.weight":    tensors["v.blk.0.ffn_down.weight"],
+					"ffn_gate.weight":    tensors["v.blk.0.ffn_gate.weight"],
+					"ffn_up.weight":      tensors["v.blk.0.ffn_up.weight"],
+					"ffn_norm.weight":    tensors["v.blk.0.ffn_norm.weight"],
+				},
+				"v": {
+					"patch_embd.weight":    tensors["v.patch_embd.weight"],
+					"position_embd.gate":   tensors["v.position_embd.gate"],
+					"position_embd.weight": tensors["v.position_embd.weight"],
+				},
+			},
+		},
+	}
+
+	for _, tt := range cases {
+		t.Run(tt.name, func(t *testing.T) {
+			got := Tensors{items: tt.items}.GroupLayers()
+			if diff := cmp.Diff(got, tt.want); diff != "" {
+				t.Errorf("unexpected layers (-got +want):\n%s", diff)
+			}
+		})
+	}
+}
+
+// ref: https://github.com/ggml-org/llama.cpp/blob/a82c9e7c23ef6db48cebfa194dc9cebbc4ac3552/ggml/src/ggml.c#L572
+func TestTensorTypes(t *testing.T) {
+	cases := []struct {
+		kind      uint32
+		blockSize uint64
+		typeSize  uint64
+	}{
+		{0, 1, 4},
+		{1, 1, 2},
+		{2, 32, 18},
+		{3, 32, 20},
+		{6, 32, 22},
+		{7, 32, 24},
+		{8, 32, 34},
+		{9, 32, 36},
+		{10, 256, 84},
+		{11, 256, 110},
+		{12, 256, 144},
+		{13, 256, 176},
+		{14, 256, 210},
+		{15, 256, 292},
+		{16, 256, 66},
+		{17, 256, 74},
+		{18, 256, 98},
+		{19, 256, 50},
+		{20, 32, 18},
+		{21, 256, 110},
+		{22, 256, 82},
+		{23, 256, 136},
+		{24, 1, 1},
+		{25, 1, 2},
+		{26, 1, 4},
+		{27, 1, 8},
+		{28, 1, 8},
+		{29, 256, 56},
+		{30, 1, 2},
+	}
+
+	for _, tt := range cases {
+		t.Run(strconv.Itoa(int(tt.kind)), func(t *testing.T) {
+			tensor := Tensor{Kind: tt.kind}
+			if tensor.blockSize() != tt.blockSize {
+				t.Errorf("unexpected block size: got=%d want=%d", tensor.blockSize(), tt.blockSize)
+			}
+
+			if tensor.typeSize() != tt.typeSize {
+				t.Errorf("unexpected type size: got=%d want=%d", tensor.typeSize(), tt.typeSize)
+			}
+		})
+	}
+}
+
+func TestKeyValue(t *testing.T) {
+	kv := KV{
+		"general.architecture": "test",
+		"test.strings":         &array[string]{size: 3, values: []string{"a", "b", "c"}},
+		"test.float32s":        &array[float32]{size: 3, values: []float32{1.0, 2.0, 3.0}},
+		"test.int32s":          &array[int32]{size: 3, values: []int32{1, 2, 3}},
+		"test.uint32s":         &array[uint32]{size: 3, values: []uint32{1, 2, 3}},
+	}
+
+	if diff := cmp.Diff(kv.Strings("strings"), []string{"a", "b", "c"}); diff != "" {
+		t.Errorf("unexpected strings (-got +want):\n%s", diff)
+	}
+
+	if diff := cmp.Diff(kv.Strings("nonexistent.strings"), []string(nil)); diff != "" {
+		t.Errorf("unexpected strings (-got +want):\n%s", diff)
+	}
+
+	if diff := cmp.Diff(kv.Strings("default.strings", []string{"ollama"}), []string{"ollama"}); diff != "" {
+		t.Errorf("unexpected strings (-got +want):\n%s", diff)
+	}
+
+	if diff := cmp.Diff(kv.Floats("float32s"), []float32{1.0, 2.0, 3.0}); diff != "" {
+		t.Errorf("unexpected float32s (-got +want):\n%s", diff)
+	}
+
+	if diff := cmp.Diff(kv.Floats("nonexistent.float32s"), []float32(nil)); diff != "" {
+		t.Errorf("unexpected float32s (-got +want):\n%s", diff)
+	}
+
+	if diff := cmp.Diff(kv.Floats("default.float32s", []float32{math.MaxFloat32}), []float32{math.MaxFloat32}); diff != "" {
+		t.Errorf("unexpected float32s (-got +want):\n%s", diff)
+	}
+
+	if diff := cmp.Diff(kv.Ints("int32s"), []int32{1, 2, 3}); diff != "" {
+		t.Errorf("unexpected int8s (-got +want):\n%s", diff)
+	}
+
+	if diff := cmp.Diff(kv.Ints("nonexistent.int32s"), []int32(nil)); diff != "" {
+		t.Errorf("unexpected int8s (-got +want):\n%s", diff)
+	}
+
+	if diff := cmp.Diff(kv.Ints("default.int32s", []int32{math.MaxInt32}), []int32{math.MaxInt32}); diff != "" {
+		t.Errorf("unexpected int8s (-got +want):\n%s", diff)
+	}
+
+	if diff := cmp.Diff(kv.Uints("uint32s"), []uint32{1, 2, 3}); diff != "" {
+		t.Errorf("unexpected uint8s (-got +want):\n%s", diff)
+	}
+
+	if diff := cmp.Diff(kv.Uints("nonexistent.uint32s"), []uint32(nil)); diff != "" {
+		t.Errorf("unexpected uint8s (-got +want):\n%s", diff)
+	}
+
+	if diff := cmp.Diff(kv.Uints("default.uint32s", []uint32{math.MaxUint32}), []uint32{math.MaxUint32}); diff != "" {
+		t.Errorf("unexpected uint8s (-got +want):\n%s", diff)
+	}
+}
diff --git a/fs/ggml/gguf.go b/fs/ggml/gguf.go
new file mode 100644
index 0000000..8e75625
--- /dev/null
+++ b/fs/ggml/gguf.go
@@ -0,0 +1,654 @@
+package ggml
+
+import (
+	"bytes"
+	"cmp"
+	"encoding/binary"
+	"encoding/json"
+	"fmt"
+	"io"
+	"log/slog"
+	"maps"
+	"os"
+	"runtime"
+	"slices"
+	"strings"
+
+	"golang.org/x/sync/errgroup"
+)
+
+type containerGGUF struct {
+	ByteOrder binary.ByteOrder
+
+	Version uint32
+
+	V1 struct {
+		NumTensor uint32
+		NumKV     uint32
+	}
+
+	V2 struct {
+		NumTensor uint64
+		NumKV     uint64
+	}
+
+	V3 struct {
+		NumTensor uint64
+		NumKV     uint64
+	}
+
+	maxArraySize int
+}
+
+func (c *containerGGUF) Name() string {
+	return "gguf"
+}
+
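+// Decode reads the gguf version and the version-specific tensor and
+// key-value counts from the header, then decodes the metadata that follows.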
+func (c *containerGGUF) Decode(rs io.ReadSeeker) (model, error) {
+	if err := binary.Read(rs, c.ByteOrder, &c.Version); err != nil {
+		return nil, err
+	}
+
+	var err error
+	switch c.Version {
+	case 1:
+		err = binary.Read(rs, c.ByteOrder, &c.V1)
+	case 2:
+		err = binary.Read(rs, c.ByteOrder, &c.V2)
+	default:
+		err = binary.Read(rs, c.ByteOrder, &c.V3)
+	}
+	if err != nil {
+		return nil, err
+	}
+
+	model := newGGUF(c)
+	if err := model.Decode(rs); err != nil {
+		return nil, err
+	}
+
+	return model, nil
+}
+
+const (
+	ggufTypeUint8 uint32 = iota
+	ggufTypeInt8
+	ggufTypeUint16
+	ggufTypeInt16
+	ggufTypeUint32
+	ggufTypeInt32
+	ggufTypeFloat32
+	ggufTypeBool
+	ggufTypeString
+	ggufTypeArray
+	ggufTypeUint64
+	ggufTypeInt64
+	ggufTypeFloat64
+)
+
+type gguf struct {
+	*containerGGUF
+
+	kv      KV
+	tensors []*Tensor
+
+	parameters   uint64
+	tensorOffset uint64
+
+	scratch [16 << 10]byte
+}
+
+func newGGUF(container *containerGGUF) *gguf {
+	return &gguf{
+		containerGGUF: container,
+		kv:            make(KV),
+	}
+}
+
+func (llm *gguf) KV() KV {
+	return llm.kv
+}
+
+func (llm *gguf) Tensors() Tensors {
+	return Tensors{
+		items:  llm.tensors,
+		Offset: llm.tensorOffset,
+	}
+}
+
+func (llm *gguf) numTensor() uint64 {
+	switch llm.Version {
+	case 1:
+		return uint64(llm.V1.NumTensor)
+	case 2:
+		return llm.V2.NumTensor
+	default:
+		return llm.V3.NumTensor
+	}
+}
+
+func (llm *gguf) numKV() uint64 {
+	switch llm.Version {
+	case 1:
+		return uint64(llm.V1.NumKV)
+	case 2:
+		return llm.V2.NumKV
+	default:
+		return llm.V3.NumKV
+	}
+}
+
+func (llm *gguf) Decode(rs io.ReadSeeker) error {
+	// decode key-values
+	for i := 0; uint64(i) < llm.numKV(); i++ {
+		k, err := readGGUFString(llm, rs)
+		if err != nil {
+			return err
+		}
+
+		t, err := readGGUF[uint32](llm, rs)
+		if err != nil {
+			return err
+		}
+
+		var v any
+		switch t {
+		case ggufTypeUint8:
+			v, err = readGGUF[uint8](llm, rs)
+		case ggufTypeInt8:
+			v, err = readGGUF[int8](llm, rs)
+		case ggufTypeUint16:
+			v, err = readGGUF[uint16](llm, rs)
+		case ggufTypeInt16:
+			v, err = readGGUF[int16](llm, rs)
+		case ggufTypeUint32:
+			v, err = readGGUF[uint32](llm, rs)
+		case ggufTypeInt32:
+			v, err = readGGUF[int32](llm, rs)
+		case ggufTypeUint64:
+			v, err = readGGUF[uint64](llm, rs)
+		case ggufTypeInt64:
+			v, err = readGGUF[int64](llm, rs)
+		case ggufTypeFloat32:
+			v, err = readGGUF[float32](llm, rs)
+		case ggufTypeFloat64:
+			v, err = readGGUF[float64](llm, rs)
+		case ggufTypeBool:
+			v, err = readGGUF[bool](llm, rs)
+		case ggufTypeString:
+			v, err = readGGUFString(llm, rs)
+		case ggufTypeArray:
+			v, err = readGGUFArray(llm, rs)
+		default:
+			return fmt.Errorf("invalid type: %d", t)
+		}
+
+		if err != nil {
+			return err
+		}
+
+		llm.kv[k] = v
+	}
+
+	// decode tensors
+	for range llm.numTensor() {
+		name, err := readGGUFString(llm, rs)
+		if err != nil {
+			return fmt.Errorf("failed to read tensor name: %w", err)
+		}
+
+		// dims is the number of dimensions in the tensor
+		dims, err := readGGUF[uint32](llm, rs)
+		if err != nil {
+			return fmt.Errorf("failed to read tensor dimensions: %w", err)
+		}
+
+		shape := make([]uint64, dims)
+		for i := 0; uint32(i) < dims; i++ {
+			shape[i], err = readGGUF[uint64](llm, rs)
+			if err != nil {
+				return fmt.Errorf("failed to read tensor shape: %w", err)
+			}
+		}
+
+		kind, err := readGGUF[uint32](llm, rs)
+		if err != nil {
+			return fmt.Errorf("failed to read tensor kind: %w", err)
+		}
+
+		offset, err := readGGUF[uint64](llm, rs)
+		if err != nil {
+			return fmt.Errorf("failed to read tensor offset: %w", err)
+		}
+
+		tensor := Tensor{
+			Name:   name,
+			Kind:   kind,
+			Offset: offset,
+			Shape:  shape[:],
+		}
+
+		llm.tensors = append(llm.tensors, &tensor)
+		llm.parameters += tensor.Elements()
+	}
+
+	// patch KV with parameter count
+	llm.kv["general.parameter_count"] = llm.parameters
+
+	alignment := llm.kv.Uint("general.alignment", 32)
+
+	offset, err := rs.Seek(0, io.SeekCurrent)
+	if err != nil {
+		return err
+	}
+
+	padding := ggufPadding(offset, int64(alignment))
+	llm.tensorOffset = uint64(offset + padding)
+
+	for _, tensor := range llm.tensors {
+		offset, err := rs.Seek(0, io.SeekCurrent)
+		if err != nil {
+			return fmt.Errorf("failed to get current offset: %w", err)
+		}
+
+		padding := ggufPadding(offset, int64(alignment))
+		if _, err := rs.Seek(padding, io.SeekCurrent); err != nil {
+			return fmt.Errorf("failed to seek to init padding: %w", err)
+		}
+
+		if _, err := rs.Seek(int64(tensor.Size()), io.SeekCurrent); err != nil {
+			return fmt.Errorf("failed to seek to tensor: %w", err)
+		}
+	}
+
+	return nil
+}
+
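+// readGGUF reads a single fixed-size value of type T from r using the
+// container's byte order.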
+func readGGUF[T any](llm *gguf, r io.Reader) (T, error) {
+	var t T
+	err := binary.Read(r, llm.ByteOrder, &t)
+	return t, err
+}
+
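+// writeGGUF writes the gguf type tag t followed by the value v in
+// little-endian byte order.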
+func writeGGUF[V any](w io.Writer, t uint32, v V) error {
+	if err := binary.Write(w, binary.LittleEndian, t); err != nil {
+		return err
+	}
+
+	return binary.Write(w, binary.LittleEndian, v)
+}
+
+func readGGUFV1String(llm *gguf, r io.Reader) (string, error) {
+	var length uint64
+	if err := binary.Read(r, llm.ByteOrder, &length); err != nil {
+		return "", err
+	}
+
+	var b bytes.Buffer
+	if _, err := io.CopyN(&b, r, int64(length)); err != nil {
+		return "", err
+	}
+
+	// gguf v1 strings are null-terminated
+	b.Truncate(b.Len() - 1)
+
+	return b.String(), nil
+}
+
+func readGGUFV1StringsData(llm *gguf, r io.Reader, a *array[string]) (any, error) {
+	for i := range a.size {
+		if a.values != nil {
+			e, err := readGGUFV1String(llm, r)
+			if err != nil {
+				return nil, err
+			}
+
+			a.values[i] = e
+		} else {
+			discardGGUFString(llm, r)
+		}
+	}
+
+	return a, nil
+}
+
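+// discardGGUFString reads and discards a length-prefixed gguf string,
+// reusing the scratch buffer so skipped strings do not allocate.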
+func discardGGUFString(llm *gguf, r io.Reader) error {
+	buf := llm.scratch[:8]
+	_, err := io.ReadFull(r, buf)
+	if err != nil {
+		return err
+	}
+
+	size := int(llm.ByteOrder.Uint64(buf))
+	for size > 0 {
+		n, err := r.Read(llm.scratch[:min(size, cap(llm.scratch))])
+		if err != nil {
+			return err
+		}
+		size -= n
+	}
+	return nil
+}
+
+func readGGUFString(llm *gguf, r io.Reader) (string, error) {
+	if llm.Version == 1 {
+		return readGGUFV1String(llm, r)
+	}
+
+	buf := llm.scratch[:8]
+	_, err := io.ReadFull(r, buf)
+	if err != nil {
+		return "", err
+	}
+
+	length := int(llm.ByteOrder.Uint64(buf))
+	if length > len(llm.scratch) {
+		buf = make([]byte, length)
+	} else {
+		buf = llm.scratch[:length]
+	}
+	clear(buf)
+
+	_, err = io.ReadFull(r, buf)
+	if err != nil {
+		return "", err
+	}
+	return string(buf), nil
+}
+
+func writeGGUFString(w io.Writer, s string) error {
+	if err := binary.Write(w, binary.LittleEndian, ggufTypeString); err != nil {
+		return err
+	}
+
+	if err := binary.Write(w, binary.LittleEndian, uint64(len(s))); err != nil {
+		return err
+	}
+
+	_, err := io.Copy(w, strings.NewReader(s))
+	return err
+}
+
+func readGGUFStringsData(llm *gguf, r io.Reader, a *array[string]) (any, error) {
+	for i := range a.size {
+		if a.values != nil {
+			e, err := readGGUFString(llm, r)
+			if err != nil {
+				return nil, err
+			}
+
+			a.values[i] = e
+		} else {
+			discardGGUFString(llm, r)
+		}
+	}
+
+	return a, nil
+}
+
+type array[T any] struct {
+	// size is the actual size of the array
+	size int
+
+	// values is the array of values. It is nil if the array is larger than the configured maxArraySize.
+	values []T
+}
+
+func (a *array[T]) MarshalJSON() ([]byte, error) {
+	return json.Marshal(a.values)
+}
+
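+// newArray returns an array descriptor of the given size. The backing
+// values slice is only allocated when size is within maxSize; a negative
+// maxSize means unlimited.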
+func newArray[T any](size, maxSize int) *array[T] {
+	a := array[T]{size: size}
+	if maxSize < 0 || size <= maxSize {
+		a.values = make([]T, size)
+	}
+	return &a
+}
+
+func readGGUFArray(llm *gguf, r io.Reader) (any, error) {
+	t, err := readGGUF[uint32](llm, r)
+	if err != nil {
+		return nil, err
+	}
+
+	n, err := readGGUF[uint64](llm, r)
+	if err != nil {
+		return nil, err
+	}
+
+	switch t {
+	case ggufTypeUint8:
+		a := newArray[uint8](int(n), llm.maxArraySize)
+		return readGGUFArrayData(llm, r, a)
+	case ggufTypeInt8:
+		a := newArray[int8](int(n), llm.maxArraySize)
+		return readGGUFArrayData(llm, r, a)
+	case ggufTypeUint16:
+		a := newArray[uint16](int(n), llm.maxArraySize)
+		return readGGUFArrayData(llm, r, a)
+	case ggufTypeInt16:
+		a := newArray[int16](int(n), llm.maxArraySize)
+		return readGGUFArrayData(llm, r, a)
+	case ggufTypeUint32:
+		a := newArray[uint32](int(n), llm.maxArraySize)
+		return readGGUFArrayData(llm, r, a)
+	case ggufTypeInt32:
+		a := newArray[int32](int(n), llm.maxArraySize)
+		return readGGUFArrayData(llm, r, a)
+	case ggufTypeUint64:
+		a := newArray[uint64](int(n), llm.maxArraySize)
+		return readGGUFArrayData(llm, r, a)
+	case ggufTypeInt64:
+		a := newArray[int64](int(n), llm.maxArraySize)
+		return readGGUFArrayData(llm, r, a)
+	case ggufTypeFloat32:
+		a := newArray[float32](int(n), llm.maxArraySize)
+		return readGGUFArrayData(llm, r, a)
+	case ggufTypeFloat64:
+		a := newArray[float64](int(n), llm.maxArraySize)
+		return readGGUFArrayData(llm, r, a)
+	case ggufTypeBool:
+		a := newArray[bool](int(n), llm.maxArraySize)
+		return readGGUFArrayData(llm, r, a)
+	case ggufTypeString:
+		a := newArray[string](int(n), llm.maxArraySize)
+		if llm.Version == 1 {
+			return readGGUFV1StringsData(llm, r, a)
+		}
+
+		return readGGUFStringsData(llm, r, a)
+	default:
+		return nil, fmt.Errorf("invalid array type: %d", t)
+	}
+}
+
+func readGGUFArrayData[T any](llm *gguf, r io.Reader, a *array[T]) (any, error) {
+	for i := range a.size {
+		e, err := readGGUF[T](llm, r)
+		if err != nil {
+			return nil, err
+		}
+
+		if a.values != nil {
+			a.values[i] = e
+		}
+	}
+
+	return a, nil
+}
+
+// writeGGUFArray writes the slice s of element type E to w, tagged with the gguf element type t.
+func writeGGUFArray[S ~[]E, E any](w io.Writer, t uint32, s S) error {
+	if err := binary.Write(w, binary.LittleEndian, ggufTypeArray); err != nil {
+		return err
+	}
+
+	if err := binary.Write(w, binary.LittleEndian, t); err != nil {
+		return err
+	}
+
+	if err := binary.Write(w, binary.LittleEndian, uint64(len(s))); err != nil {
+		return err
+	}
+
+	if t == ggufTypeString {
+		for _, e := range any(s).([]string) {
+			if err := binary.Write(w, binary.LittleEndian, uint64(len(e))); err != nil {
+				return err
+			}
+
+			if err := binary.Write(w, binary.LittleEndian, []byte(e)); err != nil {
+				return err
+			}
+		}
+		return nil
+	}
+
+	return binary.Write(w, binary.LittleEndian, s)
+}
+
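+// WriteGGUF writes kv and ts to f as a little-endian gguf v3 file: header,
+// key-values in sorted key order, tensor metadata, then the tensor data
+// written concurrently at aligned offsets.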
+func WriteGGUF(f *os.File, kv KV, ts []*Tensor) error {
+	alignment := kv.Uint("general.alignment", 32)
+
+	if err := binary.Write(f, binary.LittleEndian, []byte("GGUF")); err != nil {
+		return err
+	}
+
+	if err := binary.Write(f, binary.LittleEndian, uint32(3)); err != nil {
+		return err
+	}
+
+	if err := binary.Write(f, binary.LittleEndian, uint64(len(ts))); err != nil {
+		return err
+	}
+
+	if err := binary.Write(f, binary.LittleEndian, uint64(len(kv))); err != nil {
+		return err
+	}
+
+	keys := slices.Collect(maps.Keys(kv))
+	slices.Sort(keys)
+
+	for _, key := range keys {
+		if err := ggufWriteKV(f, key, kv[key]); err != nil {
+			return err
+		}
+	}
+
+	slices.SortStableFunc(ts, func(a, b *Tensor) int {
+		if i, j := a.block(), b.block(); i < 0 && j > 0 {
+			return 1
+		} else if i > 0 && j < 0 {
+			return -1
+		} else {
+			return cmp.Compare(i, j)
+		}
+	})
+
+	var s uint64
+	for i := range ts {
+		ts[i].Offset = s
+		if err := ggufWriteTensorInfo(f, ts[i]); err != nil {
+			return err
+		}
+		s += ts[i].Size()
+		s += uint64(ggufPadding(int64(s), int64(alignment)))
+	}
+
+	offset, err := f.Seek(0, io.SeekCurrent)
+	if err != nil {
+		return err
+	}
+	offset += ggufPadding(offset, int64(alignment))
+
+	var g errgroup.Group
+	g.SetLimit(runtime.GOMAXPROCS(0))
+	// TODO: consider lowering the limit when tensor size * GOMAXPROCS exceeds available memory
+	for _, t := range ts {
+		w := io.NewOffsetWriter(f, offset+int64(t.Offset))
+		g.Go(func() error {
+			_, err := t.WriteTo(w)
+			return err
+		})
+	}
+
+	return g.Wait()
+}
+
+func ggufWriteKV(ws io.WriteSeeker, k string, v any) error {
+	slog.Debug(k, "type", fmt.Sprintf("%T", v))
+	if err := binary.Write(ws, binary.LittleEndian, uint64(len(k))); err != nil {
+		return err
+	}
+
+	if err := binary.Write(ws, binary.LittleEndian, []byte(k)); err != nil {
+		return err
+	}
+
+	var err error
+	switch v := v.(type) {
+	case uint32, FileType:
+		err = writeGGUF(ws, ggufTypeUint32, v)
+	case uint64:
+		err = writeGGUF(ws, ggufTypeUint64, v)
+	case float32:
+		err = writeGGUF(ws, ggufTypeFloat32, v)
+	case bool:
+		err = writeGGUF(ws, ggufTypeBool, v)
+	case string:
+		err = writeGGUFString(ws, v)
+	case []int32:
+		err = writeGGUFArray(ws, ggufTypeInt32, v)
+	case *array[int32]:
+		err = writeGGUFArray(ws, ggufTypeInt32, v.values)
+	case []uint32:
+		err = writeGGUFArray(ws, ggufTypeUint32, v)
+	case *array[uint32]:
+		err = writeGGUFArray(ws, ggufTypeUint32, v.values)
+	case []float32:
+		err = writeGGUFArray(ws, ggufTypeFloat32, v)
+	case *array[float32]:
+		err = writeGGUFArray(ws, ggufTypeFloat32, v.values)
+	case []string:
+		err = writeGGUFArray(ws, ggufTypeString, v)
+	case *array[string]:
+		err = writeGGUFArray(ws, ggufTypeString, v.values)
+	default:
+		return fmt.Errorf("improper type for '%s'", k)
+	}
+
+	return err
+}
+
+func ggufWriteTensorInfo(ws io.WriteSeeker, t *Tensor) error {
+	slog.Debug(t.Name, "kind", t.Kind, "shape", t.Shape, "offset", t.Offset)
+	if err := binary.Write(ws, binary.LittleEndian, uint64(len(t.Name))); err != nil {
+		return err
+	}
+
+	if err := binary.Write(ws, binary.LittleEndian, []byte(t.Name)); err != nil {
+		return err
+	}
+
+	if err := binary.Write(ws, binary.LittleEndian, uint32(len(t.Shape))); err != nil {
+		return err
+	}
+
+	for _, n := range t.Shape {
+		if err := binary.Write(ws, binary.LittleEndian, n); err != nil {
+			return err
+		}
+	}
+
+	if err := binary.Write(ws, binary.LittleEndian, t.Kind); err != nil {
+		return err
+	}
+
+	return binary.Write(ws, binary.LittleEndian, t.Offset)
+}
+
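+// ggufPadding returns the number of bytes needed to advance offset to the
+// next multiple of align.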
+func ggufPadding(offset, align int64) int64 {
+	return (align - offset%align) % align
+}
diff --git a/fs/ggml/gguf_test.go b/fs/ggml/gguf_test.go
new file mode 100644
index 0000000..0e07180
--- /dev/null
+++ b/fs/ggml/gguf_test.go
@@ -0,0 +1,63 @@
+package ggml
+
+import (
+	"bytes"
+	"os"
+	"slices"
+	"testing"
+
+	"github.com/google/go-cmp/cmp"
+)
+
+func TestWriteGGUF(t *testing.T) {
+	w, err := os.CreateTemp(t.TempDir(), "*.bin")
+	if err != nil {
+		t.Fatal(err)
+	}
+	defer w.Close()
+
+	if err := WriteGGUF(w, KV{
+		"general.alignment": uint32(16),
+	}, []*Tensor{
+		{Name: "test.0", Shape: []uint64{2, 3}, WriterTo: bytes.NewBuffer(slices.Repeat([]byte{0}, 2*3*4))},
+		{Name: "test.1", Shape: []uint64{2, 3}, WriterTo: bytes.NewBuffer(slices.Repeat([]byte{0}, 2*3*4))},
+		{Name: "test.2", Shape: []uint64{2, 3}, WriterTo: bytes.NewBuffer(slices.Repeat([]byte{0}, 2*3*4))},
+		{Name: "test.3", Shape: []uint64{2, 3}, WriterTo: bytes.NewBuffer(slices.Repeat([]byte{0}, 2*3*4))},
+		{Name: "test.4", Shape: []uint64{2, 3}, WriterTo: bytes.NewBuffer(slices.Repeat([]byte{0}, 2*3*4))},
+		{Name: "test.5", Shape: []uint64{2, 3}, WriterTo: bytes.NewBuffer(slices.Repeat([]byte{0}, 2*3*4))},
+	}); err != nil {
+		t.Fatal(err)
+	}
+
+	r, err := os.Open(w.Name())
+	if err != nil {
+		t.Fatal(err)
+	}
+	defer r.Close()
+
+	ff, err := Decode(r, 0)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	if diff := cmp.Diff(ff.KV(), KV{
+		"general.alignment":       uint32(16),
+		"general.parameter_count": uint64(36),
+	}); diff != "" {
+		t.Errorf("Mismatch (-want +got):\n%s", diff)
+	}
+
+	if diff := cmp.Diff(ff.Tensors(), Tensors{
+		Offset: 336,
+		items: []*Tensor{
+			{Name: "test.0", Offset: 0, Shape: []uint64{2, 3}},
+			{Name: "test.1", Offset: 32, Shape: []uint64{2, 3}},
+			{Name: "test.2", Offset: 64, Shape: []uint64{2, 3}},
+			{Name: "test.3", Offset: 96, Shape: []uint64{2, 3}},
+			{Name: "test.4", Offset: 128, Shape: []uint64{2, 3}},
+			{Name: "test.5", Offset: 160, Shape: []uint64{2, 3}},
+		},
+	}, cmp.AllowUnexported(Tensors{})); diff != "" {
+		t.Errorf("Mismatch (-want +got):\n%s", diff)
+	}
+}
diff --git a/fs/ggml/type.go b/fs/ggml/type.go
new file mode 100644
index 0000000..4d3d5bc
--- /dev/null
+++ b/fs/ggml/type.go
@@ -0,0 +1,318 @@
+package ggml
+
+import (
+	"fmt"
+	"log/slog"
+	"strings"
+)
+
+// FileType is the Go equivalent to llama_ftype used for gguf file typing
+type FileType uint32
+
+const (
+	FileTypeF32 FileType = iota
+	FileTypeF16
+	fileTypeQ4_0
+	fileTypeQ4_1
+	fileTypeQ4_1_F16 // unused by GGML
+	fileTypeQ4_2     // unused by GGML
+	fileTypeQ4_3     // unused by GGML
+	FileTypeQ8_0
+	fileTypeQ5_0
+	fileTypeQ5_1
+	fileTypeQ2_K
+	fileTypeQ3_K_S
+	fileTypeQ3_K_M
+	fileTypeQ3_K_L
+	FileTypeQ4_K_S
+	FileTypeQ4_K_M
+	fileTypeQ5_K_S
+	fileTypeQ5_K_M
+	fileTypeQ6_K
+	fileTypeIQ2_XXS
+	fileTypeIQ2_XS
+	fileTypeQ2_K_S
+	fileTypeIQ3_XS
+	fileTypeIQ3_XXS
+	fileTypeIQ1_S
+	fileTypeIQ4_NL
+	fileTypeIQ3_S
+	fileTypeIQ3_M
+	fileTypeIQ2_S
+	fileTypeIQ2_M
+	fileTypeIQ4_XS
+	fileTypeIQ1_M
+	FileTypeBF16
+	fileTypeQ4_0_4_4 // unused by GGML
+	fileTypeQ4_0_4_8 // unused by GGML
+	fileTypeQ4_0_8_8 // unused by GGML
+	fileTypeTQ1_0
+	fileTypeTQ2_0
+
+	FileTypeUnknown = 1024
+)
+
+// ParseFileType parses the provided GGUF file type.
+// Only Ollama-supported types are considered valid.
+func ParseFileType(s string) (FileType, error) {
+	switch s {
+	case "F32":
+		return FileTypeF32, nil
+	case "F16":
+		return FileTypeF16, nil
+	case "Q8_0":
+		return FileTypeQ8_0, nil
+	case "Q4_K_S":
+		return FileTypeQ4_K_S, nil
+	case "Q4_K_M", "Q4_K":
+		return FileTypeQ4_K_M, nil
+	case "BF16":
+		return FileTypeBF16, nil
+	default:
+		supportedFileTypes := []FileType{
+			FileTypeF32,
+			FileTypeF16,
+			FileTypeQ4_K_S,
+			FileTypeQ4_K_M,
+			FileTypeQ8_0,
+			// fsggml.FileTypeBF16, // TODO
+		}
+		strs := make([]string, len(supportedFileTypes))
+		for i := range supportedFileTypes {
+			strs[i] = supportedFileTypes[i].String()
+		}
+
+		return FileTypeUnknown, fmt.Errorf("unsupported quantization type %s - supported types are %s", s, strings.Join(strs, ", "))
+	}
+}
+
+func (t FileType) String() string {
+	// Note: this returns a broader set of file types than ParseFileType accepts, to cover existing models.
+	switch t {
+	case FileTypeF32:
+		return "F32"
+	case FileTypeF16:
+		return "F16"
+	case fileTypeQ4_0:
+		return "Q4_0"
+	case fileTypeQ4_1:
+		return "Q4_1"
+	case FileTypeQ8_0:
+		return "Q8_0"
+	case fileTypeQ5_0:
+		return "Q5_0"
+	case fileTypeQ5_1:
+		return "Q5_1"
+	case fileTypeQ2_K:
+		return "Q2_K"
+	case fileTypeQ3_K_S:
+		return "Q3_K_S"
+	case fileTypeQ3_K_M:
+		return "Q3_K_M"
+	case fileTypeQ3_K_L:
+		return "Q3_K_L"
+	case FileTypeQ4_K_S:
+		return "Q4_K_S"
+	case FileTypeQ4_K_M:
+		return "Q4_K_M"
+	case fileTypeQ5_K_S:
+		return "Q5_K_S"
+	case fileTypeQ5_K_M:
+		return "Q5_K_M"
+	case fileTypeQ6_K:
+		return "Q6_K"
+	case fileTypeQ2_K_S:
+		return "Q2_K_S"
+	case FileTypeBF16:
+		return "BF16"
+	default:
+		return "unknown"
+	}
+}
+
+func (t FileType) Value() uint32 {
+	return uint32(t)
+}
+
+func (ftype FileType) ToTensorType() TensorType {
+	switch ftype {
+	case FileTypeF32:
+		return TensorTypeF32
+	case FileTypeF16:
+		return TensorTypeF16
+	case fileTypeQ4_0:
+		return TensorTypeQ4_0
+	case fileTypeQ4_1:
+		return TensorTypeQ4_1
+	case FileTypeQ8_0:
+		return TensorTypeQ8_0
+	case fileTypeQ5_0:
+		return TensorTypeQ5_0
+	case fileTypeQ5_1:
+		return TensorTypeQ5_1
+	case fileTypeQ2_K:
+		return TensorTypeQ2_K
+	case fileTypeQ3_K_S:
+		return TensorTypeQ3_K
+	case fileTypeQ3_K_M:
+		return TensorTypeQ3_K
+	case fileTypeQ3_K_L:
+		return TensorTypeQ3_K
+	case FileTypeQ4_K_S:
+		return TensorTypeQ4_K
+	case FileTypeQ4_K_M:
+		return TensorTypeQ4_K
+	case fileTypeQ5_K_S:
+		return TensorTypeQ5_K
+	case fileTypeQ5_K_M:
+		return TensorTypeQ5_K
+	case fileTypeQ6_K:
+		return TensorTypeQ6_K
+	case fileTypeQ2_K_S:
+		return TensorTypeQ2_K
+	case FileTypeBF16:
+		return TensorTypeBF16
+	default:
+		slog.Warn("unsupported file type", "type", ftype)
+		return 0 // F32
+	}
+}
+
+// TensorType is equivalent to ggml_type for individual tensor types
+// Note: these are not the same as FileType
+type TensorType uint32
+
+const (
+	TensorTypeF32 TensorType = iota
+	TensorTypeF16
+	TensorTypeQ4_0
+	TensorTypeQ4_1
+	tensorTypeQ4_2 // unused by GGML
+	tensorTypeQ4_3 // unused by GGML
+	TensorTypeQ5_0
+	TensorTypeQ5_1
+	TensorTypeQ8_0
+	TensorTypeQ8_1
+	TensorTypeQ2_K
+	TensorTypeQ3_K
+	TensorTypeQ4_K
+	TensorTypeQ5_K
+	TensorTypeQ6_K
+	TensorTypeQ8_K
+	tensorTypeIQ2_XXS // not supported by ollama
+	tensorTypeIQ2_XS  // not supported by ollama
+	tensorTypeIQ3_XXS // not supported by ollama
+	tensorTypeIQ1_S   // not supported by ollama
+	tensorTypeIQ4_NL  // not supported by ollama
+	tensorTypeIQ3_S   // not supported by ollama
+	tensorTypeIQ2_S   // not supported by ollama
+	tensorTypeIQ4_XS  // not supported by ollama
+	TensorTypeI8
+	TensorTypeI16
+	TensorTypeI32
+	TensorTypeI64
+	TensorTypeF64
+	tensorTypeIQ1_M // not supported by ollama
+	TensorTypeBF16
+	tensorTypeQ4_0_4_4   // unused by GGML
+	tensorTypeQ4_0_4_8   // unused by GGML
+	tensorTypeQ4_0_8_8   // unused by GGML
+	tensorTypeTQ1_0      // not supported by ollama
+	tensorTypeTQ2_0      // not supported by ollama
+	tensorTypeIQ4_NL_4_4 // unused by GGML
+	tensorTypeIQ4_NL_4_8 // unused by GGML
+	tensorTypeIQ4_NL_8_8 // unused by GGML
+)
+
+// ParseTensorType parses the provided tensor (ggml) type.
+// Only Ollama-supported types are considered valid.
+func ParseTensorType(s string) (TensorType, error) {
+	switch s {
+	case "F32":
+		return TensorTypeF32, nil
+	case "F16":
+		return TensorTypeF16, nil
+	case "Q4_0":
+		return TensorTypeQ4_0, nil
+	case "Q4_1":
+		return TensorTypeQ4_1, nil
+	case "Q5_0":
+		return TensorTypeQ5_0, nil
+	case "Q5_1":
+		return TensorTypeQ5_1, nil
+	case "Q8_0":
+		return TensorTypeQ8_0, nil
+	case "Q8_1":
+		return TensorTypeQ8_1, nil
+	case "Q2_K":
+		return TensorTypeQ2_K, nil
+	case "Q3_K":
+		return TensorTypeQ3_K, nil
+	case "Q4_K":
+		return TensorTypeQ4_K, nil
+	case "Q5_K":
+		return TensorTypeQ5_K, nil
+	case "Q6_K":
+		return TensorTypeQ6_K, nil
+	case "Q8_K":
+		return TensorTypeQ8_K, nil
+	case "F64":
+		return TensorTypeF64, nil
+	case "BF16":
+		return TensorTypeBF16, nil
+	default:
+		return 0, fmt.Errorf("unsupported quantization type %s", s)
+	}
+}
+
+func (t TensorType) IsQuantized() bool {
+	switch t {
+	case TensorTypeF32, TensorTypeF16, TensorTypeBF16:
+		return false
+	default:
+		return true
+	}
+}
+
+func (t TensorType) RowSize(ne uint64) uint64 {
+	return t.TypeSize() * ne / t.BlockSize()
+}
+
+func (t TensorType) String() string {
+	switch t {
+	case TensorTypeF32:
+		return "F32"
+	case TensorTypeF16:
+		return "F16"
+	case TensorTypeQ4_0:
+		return "Q4_0"
+	case TensorTypeQ4_1:
+		return "Q4_1"
+	case TensorTypeQ5_0:
+		return "Q5_0"
+	case TensorTypeQ5_1:
+		return "Q5_1"
+	case TensorTypeQ8_0:
+		return "Q8_0"
+	case TensorTypeQ8_1:
+		return "Q8_1"
+	case TensorTypeQ2_K:
+		return "Q2_K"
+	case TensorTypeQ3_K:
+		return "Q3_K"
+	case TensorTypeQ4_K:
+		return "Q4_K"
+	case TensorTypeQ5_K:
+		return "Q5_K"
+	case TensorTypeQ6_K:
+		return "Q6_K"
+	case TensorTypeQ8_K:
+		return "Q8_K"
+	case TensorTypeF64:
+		return "F64"
+	case TensorTypeBF16:
+		return "BF16"
+	default:
+		return "unknown"
+	}
+}
diff --git a/fs/util/bufioutil/buffer_seeker.go b/fs/util/bufioutil/buffer_seeker.go
new file mode 100644
index 0000000..8775fdb
--- /dev/null
+++ b/fs/util/bufioutil/buffer_seeker.go
@@ -0,0 +1,34 @@
+package bufioutil
+
+import (
+	"bufio"
+	"io"
+)
+
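+// BufferedSeeker wraps an io.ReadSeeker with a bufio.Reader so sequential
+// reads are buffered while still allowing seeks.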
+type BufferedSeeker struct {
+	rs io.ReadSeeker
+	br *bufio.Reader
+}
+
+func NewBufferedSeeker(rs io.ReadSeeker, size int) *BufferedSeeker {
+	return &BufferedSeeker{
+		rs: rs,
+		br: bufio.NewReaderSize(rs, size),
+	}
+}
+
+func (b *BufferedSeeker) Read(p []byte) (int, error) {
+	return b.br.Read(p)
+}
+
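+// Seek adjusts relative offsets for data that is buffered but not yet
+// consumed, seeks the underlying reader, and resets the buffer.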
+func (b *BufferedSeeker) Seek(offset int64, whence int) (int64, error) {
+	if whence == io.SeekCurrent {
+		offset -= int64(b.br.Buffered())
+	}
+	n, err := b.rs.Seek(offset, whence)
+	if err != nil {
+		return 0, err
+	}
+	b.br.Reset(b.rs)
+	return n, nil
+}
diff --git a/fs/util/bufioutil/buffer_seeker_test.go b/fs/util/bufioutil/buffer_seeker_test.go
new file mode 100644
index 0000000..87145f6
--- /dev/null
+++ b/fs/util/bufioutil/buffer_seeker_test.go
@@ -0,0 +1,64 @@
+package bufioutil
+
+import (
+	"bytes"
+	"io"
+	"strings"
+	"testing"
+)
+
+func TestBufferedSeeker(t *testing.T) {
+	const alphabet = "abcdefghijklmnopqrstuvwxyz"
+
+	bs := NewBufferedSeeker(strings.NewReader(alphabet), 0) // minReadBufferSize = 16
+
+	checkRead := func(buf []byte, expected string) {
+		t.Helper()
+		_, err := bs.Read(buf)
+		if err != nil {
+			t.Fatal(err)
+		}
+		if !bytes.Equal(buf, []byte(expected)) {
+			t.Fatalf("expected %s, got %s", expected, buf)
+		}
+	}
+
+	// Read the first 5 bytes
+	buf := make([]byte, 5)
+
+	checkRead(buf, "abcde")
+
+	// Seek back to the beginning
+	_, err := bs.Seek(0, io.SeekStart)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	// read 'a'
+	checkRead(buf[:1], "a")
+
+	if bs.br.Buffered() == 0 {
+		t.Fatalf("totally unexpected sanity check failed")
+	}
+
+	// Seek past 'b'
+	_, err = bs.Seek(1, io.SeekCurrent)
+	if err != nil {
+		t.Fatal(err)
+	}
+	checkRead(buf, "cdefg")
+
+	// Seek back to the beginning
+	_, err = bs.Seek(0, io.SeekStart)
+	if err != nil {
+		t.Fatal(err)
+	}
+	checkRead(buf, "abcde")
+
+	// Seek to the end
+	_, err = bs.Seek(-5, io.SeekEnd)
+	if err != nil {
+		t.Fatal(err)
+	}
+	checkRead(buf, "vwxyz")
+}
diff --git a/go.mod b/go.mod
new file mode 100644
index 0000000..283286b
--- /dev/null
+++ b/go.mod
@@ -0,0 +1,81 @@
+module github.com/ollama/ollama
+
+go 1.24.0
+
+require (
+	github.com/containerd/console v1.0.3
+	github.com/gin-gonic/gin v1.10.0
+	github.com/golang/protobuf v1.5.4 // indirect
+	github.com/google/uuid v1.6.0
+	github.com/olekukonko/tablewriter v0.0.5
+	github.com/spf13/cobra v1.7.0
+	github.com/stretchr/testify v1.9.0
+	github.com/x448/float16 v0.8.4
+	golang.org/x/sync v0.12.0
+)
+
+require (
+	github.com/agnivade/levenshtein v1.1.1
+	github.com/d4l3k/go-bfloat16 v0.0.0-20211005043715-690c3bdd05f1
+	github.com/dlclark/regexp2 v1.11.4
+	github.com/emirpasic/gods/v2 v2.0.0-alpha
+	github.com/google/go-cmp v0.6.0
+	github.com/mattn/go-runewidth v0.0.14
+	github.com/nlpodyssey/gopickle v0.3.0
+	github.com/pdevine/tensor v0.0.0-20240510204454-f88f4562727c
+	golang.org/x/image v0.22.0
+	golang.org/x/tools v0.30.0
+)
+
+require (
+	github.com/apache/arrow/go/arrow v0.0.0-20211112161151-bc219186db40 // indirect
+	github.com/bytedance/sonic/loader v0.1.1 // indirect
+	github.com/chewxy/hm v1.0.0 // indirect
+	github.com/chewxy/math32 v1.11.0 // indirect
+	github.com/cloudwego/base64x v0.1.4 // indirect
+	github.com/cloudwego/iasm v0.2.0 // indirect
+	github.com/davecgh/go-spew v1.1.1 // indirect
+	github.com/gogo/protobuf v1.3.2 // indirect
+	github.com/google/flatbuffers v24.3.25+incompatible // indirect
+	github.com/kr/text v0.2.0 // indirect
+	github.com/pkg/errors v0.9.1 // indirect
+	github.com/pmezard/go-difflib v1.0.0 // indirect
+	github.com/rivo/uniseg v0.2.0 // indirect
+	github.com/xtgo/set v1.0.0 // indirect
+	go4.org/unsafe/assume-no-moving-gc v0.0.0-20231121144256-b99613f794b6 // indirect
+	golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
+	gonum.org/v1/gonum v0.15.0 // indirect
+	gorgonia.org/vecf32 v0.9.0 // indirect
+	gorgonia.org/vecf64 v0.9.0 // indirect
+)
+
+require (
+	github.com/bytedance/sonic v1.11.6 // indirect
+	github.com/gabriel-vasile/mimetype v1.4.3 // indirect
+	github.com/gin-contrib/cors v1.7.2
+	github.com/gin-contrib/sse v0.1.0 // indirect
+	github.com/go-playground/locales v0.14.1 // indirect
+	github.com/go-playground/universal-translator v0.18.1 // indirect
+	github.com/go-playground/validator/v10 v10.20.0 // indirect
+	github.com/goccy/go-json v0.10.2 // indirect
+	github.com/inconshreveable/mousetrap v1.1.0 // indirect
+	github.com/json-iterator/go v1.1.12 // indirect
+	github.com/klauspost/cpuid/v2 v2.2.7 // indirect
+	github.com/leodido/go-urn v1.4.0 // indirect
+	github.com/mattn/go-isatty v0.0.20 // indirect
+	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
+	github.com/modern-go/reflect2 v1.0.2 // indirect
+	github.com/pelletier/go-toml/v2 v2.2.2 // indirect
+	github.com/spf13/pflag v1.0.5 // indirect
+	github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
+	github.com/ugorji/go/codec v1.2.12 // indirect
+	golang.org/x/arch v0.8.0 // indirect
+	golang.org/x/crypto v0.36.0
+	golang.org/x/exp v0.0.0-20250218142911-aa4b98e5adaa
+	golang.org/x/net v0.38.0 // indirect
+	golang.org/x/sys v0.31.0
+	golang.org/x/term v0.30.0
+	golang.org/x/text v0.23.0
+	google.golang.org/protobuf v1.34.1
+	gopkg.in/yaml.v3 v3.0.1 // indirect
+)
diff --git a/go.sum b/go.sum
new file mode 100644
index 0000000..5755616
--- /dev/null
+++ b/go.sum
@@ -0,0 +1,371 @@
+cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
+cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
+dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
+gioui.org v0.0.0-20210308172011-57750fc8a0a6/go.mod h1:RSH6KIUZ0p2xy5zHDxgAM4zumjgTw83q2ge/PI+yyw8=
+github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
+github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
+github.com/agnivade/levenshtein v1.1.1 h1:QY8M92nrzkmr798gCo3kmMyqXFzdQVpxLlGPRBij0P8=
+github.com/agnivade/levenshtein v1.1.1/go.mod h1:veldBMzWxcCG2ZvUTKD2kJNRdCk5hVbJomOvKkmgYbo=
+github.com/ajstarks/svgo v0.0.0-20180226025133-644b8db467af/go.mod h1:K08gAheRH3/J6wwsYMMT4xOr94bZjxIelGM0+d/wbFw=
+github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
+github.com/apache/arrow/go/arrow v0.0.0-20211112161151-bc219186db40 h1:q4dksr6ICHXqG5hm0ZW5IHyeEJXoIJSOZeBLmWPNeIQ=
+github.com/apache/arrow/go/arrow v0.0.0-20211112161151-bc219186db40/go.mod h1:Q7yQnSMnLvcXlZ8RV+jwz/6y1rQTqbX6C82SndT52Zs=
+github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q=
+github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE=
+github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8=
+github.com/bytedance/sonic v1.11.6 h1:oUp34TzMlL+OY1OUWxHqsdkgC/Zfc85zGqw9siXjrc0=
+github.com/bytedance/sonic v1.11.6/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4=
+github.com/bytedance/sonic/loader v0.1.1 h1:c+e5Pt1k/cy5wMveRDyk2X4B9hF4g7an8N3zCYjJFNM=
+github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
+github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
+github.com/chewxy/hm v1.0.0 h1:zy/TSv3LV2nD3dwUEQL2VhXeoXbb9QkpmdRAVUFiA6k=
+github.com/chewxy/hm v1.0.0/go.mod h1:qg9YI4q6Fkj/whwHR1D+bOGeF7SniIP40VweVepLjg0=
+github.com/chewxy/math32 v1.0.0/go.mod h1:Miac6hA1ohdDUTagnvJy/q+aNnEk16qWUdb8ZVhvCN0=
+github.com/chewxy/math32 v1.11.0 h1:8sek2JWqeaKkVnHa7bPVqCEOUPbARo4SGxs6toKyAOo=
+github.com/chewxy/math32 v1.11.0/go.mod h1:dOB2rcuFrCn6UHrze36WSLVPKtzPMRAQvBvUwkSsLqs=
+github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
+github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y=
+github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
+github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg=
+github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
+github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
+github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
+github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
+github.com/containerd/console v1.0.3 h1:lIr7SlA5PxZyMV30bDW0MGbiOPXwc63yRuCP0ARubLw=
+github.com/containerd/console v1.0.3/go.mod h1:7LqA/THxQ86k76b8c/EMSiaJ3h1eZkMkXar0TQ1gf3U=
+github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
+github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
+github.com/d4l3k/go-bfloat16 v0.0.0-20211005043715-690c3bdd05f1 h1:cBzrdJPAFBsgCrDPnZxlp1dF2+k4r1kVpD7+1S1PVjY=
+github.com/d4l3k/go-bfloat16 v0.0.0-20211005043715-690c3bdd05f1/go.mod h1:uw2gLcxEuYUlAd/EXyjc/v55nd3+47YAgWbSXVxPrNI=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/dgryski/trifles v0.0.0-20200323201526-dd97f9abfb48 h1:fRzb/w+pyskVMQ+UbP35JkH8yB7MYb4q/qhBarqZE6g=
+github.com/dgryski/trifles v0.0.0-20200323201526-dd97f9abfb48/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA=
+github.com/dlclark/regexp2 v1.11.4 h1:rPYF9/LECdNymJufQKmri9gV604RvvABwgOA8un7yAo=
+github.com/dlclark/regexp2 v1.11.4/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
+github.com/emirpasic/gods/v2 v2.0.0-alpha h1:dwFlh8pBg1VMOXWGipNMRt8v96dKAIvBehtCt6OtunU=
+github.com/emirpasic/gods/v2 v2.0.0-alpha/go.mod h1:W0y4M2dtBB9U5z3YlghmpuUhiaZT2h6yoeE+C1sCp6A=
+github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
+github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
+github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
+github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
+github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
+github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ=
+github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
+github.com/fogleman/gg v1.2.1-0.20190220221249-0403632d5b90/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k=
+github.com/fogleman/gg v1.3.0/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k=
+github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
+github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
+github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
+github.com/gin-contrib/cors v1.7.2 h1:oLDHxdg8W/XDoN/8zamqk/Drgt4oVZDvaV0YmvVICQw=
+github.com/gin-contrib/cors v1.7.2/go.mod h1:SUJVARKgQ40dmrzgXEVxj2m7Ig1v1qIboQkPDTQ9t2E=
+github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
+github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
+github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU=
+github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
+github.com/go-fonts/dejavu v0.1.0/go.mod h1:4Wt4I4OU2Nq9asgDCteaAaWZOV24E+0/Pwo0gppep4g=
+github.com/go-fonts/latin-modern v0.2.0/go.mod h1:rQVLdDMK+mK1xscDwsqM5J8U2jrRa3T0ecnM9pNujks=
+github.com/go-fonts/liberation v0.1.1/go.mod h1:K6qoJYypsmfVjWg8KOVDQhLc8UDgIK2HYqyqAO9z7GY=
+github.com/go-fonts/stix v0.1.0/go.mod h1:w/c1f0ldAUlJmLBvlbkvVXLAD+tAMqobIIQpmnUIzUY=
+github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
+github.com/go-latex/latex v0.0.0-20210118124228-b3d85cf34e07/go.mod h1:CO1AlKB2CSIqUrmQPqA0gdRIlnLEY0gK5JGjh37zN5U=
+github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
+github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
+github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
+github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
+github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
+github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
+github.com/go-playground/validator/v10 v10.20.0 h1:K9ISHbSaI0lyB2eWMPJo+kOS/FBExVwjEviJTixqxL8=
+github.com/go-playground/validator/v10 v10.20.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
+github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
+github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
+github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
+github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
+github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k=
+github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
+github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
+github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
+github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
+github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
+github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
+github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
+github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
+github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=
+github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
+github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
+github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
+github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
+github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
+github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
+github.com/golang/snappy v0.0.3 h1:fHPg5GQYlCeLIPB9BZqMVR5nR9A+IM5zcgeTdjMYmLA=
+github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
+github.com/google/flatbuffers v2.0.0+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
+github.com/google/flatbuffers v24.3.25+incompatible h1:CX395cjN9Kke9mmalRoL3d81AtFUxJM+yDthflgJGkI=
+github.com/google/flatbuffers v24.3.25+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
+github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
+github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
+github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
+github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
+github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
+github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
+github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
+github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
+github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
+github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
+github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
+github.com/jung-kurt/gofpdf v1.0.0/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes=
+github.com/jung-kurt/gofpdf v1.0.3-0.20190309125859-24315acbbda5/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes=
+github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
+github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
+github.com/klauspost/compress v1.13.1 h1:wXr2uRxZTJXHLly6qhJabee5JqIhTRoLBhDOA74hDEQ=
+github.com/klauspost/compress v1.13.1/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg=
+github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
+github.com/klauspost/cpuid/v2 v2.2.7 h1:ZWSB3igEs+d0qvnxR/ZBzXVmxkgt8DdzP6m9pfuVLDM=
+github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
+github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
+github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=
+github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
+github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
+github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
+github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
+github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
+github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
+github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
+github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
+github.com/mattn/go-runewidth v0.0.14 h1:+xnbZSEeDbOIg5/mE6JF0w6n9duR1l3/WmbinWVwUuU=
+github.com/mattn/go-runewidth v0.0.14/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
+github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
+github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
+github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
+github.com/nlpodyssey/gopickle v0.3.0 h1:BLUE5gxFLyyNOPzlXxt6GoHEMMxD0qhsE4p0CIQyoLw=
+github.com/nlpodyssey/gopickle v0.3.0/go.mod h1:f070HJ/yR+eLi5WmM1OXJEGaTpuJEUiib19olXgYha0=
+github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec=
+github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY=
+github.com/pdevine/tensor v0.0.0-20240510204454-f88f4562727c h1:GwiUUjKefgvSNmv3NCvI/BL0kDebW6Xa+kcdpdc1mTY=
+github.com/pdevine/tensor v0.0.0-20240510204454-f88f4562727c/go.mod h1:PSojXDXF7TbgQiD6kkd98IHOS0QqTyUEaWRiS8+BLu8=
+github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM=
+github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
+github.com/phpdave11/gofpdf v1.4.2/go.mod h1:zpO6xFn9yxo3YLyMvW8HcKWVdbNqgIfOOp2dXMnm1mY=
+github.com/phpdave11/gofpdi v1.0.12/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI=
+github.com/pierrec/lz4/v4 v4.1.8 h1:ieHkV+i2BRzngO4Wd/3HGowuZStgq6QkPsD1eolNAO4=
+github.com/pierrec/lz4/v4 v4.1.8/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
+github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
+github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
+github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
+github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
+github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
+github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8=
+github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
+github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
+github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfFZQK844Gfx8o5WFuvpxWRwnSoipWe/p622j1v06w=
+github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I=
+github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0=
+github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
+github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
+github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
+github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
+github.com/stretchr/testify v1.1.4/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
+github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
+github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
+github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
+github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
+github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
+github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
+github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
+github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
+github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
+github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE=
+github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
+github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM=
+github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg=
+github.com/xtgo/set v1.0.0 h1:6BCNBRv3ORNDQ7fyoJXRv+tstJz3m1JVFQErfeZz2pY=
+github.com/xtgo/set v1.0.0/go.mod h1:d3NHzGzSa0NmB2NhFyECA+QdRp29oEn2xbT+TpeFoM8=
+github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
+github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
+github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
+go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
+go4.org/unsafe/assume-no-moving-gc v0.0.0-20231121144256-b99613f794b6 h1:lGdhQUN/cnWdSH3291CUuxSEqc+AsGTiDxPP3r2J0l4=
+go4.org/unsafe/assume-no-moving-gc v0.0.0-20231121144256-b99613f794b6/go.mod h1:FftLjUGFEDu5k8lt0ddY+HcrH/qU/0qk+H8j9/nTl3E=
+golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
+golang.org/x/arch v0.8.0 h1:3wRIsP3pM4yUptoR96otTUOXI367OS0+c9eeRi9doIc=
+golang.org/x/arch v0.8.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
+golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34=
+golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc=
+golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
+golang.org/x/exp v0.0.0-20180807140117-3d87b88a115f/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
+golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
+golang.org/x/exp v0.0.0-20190125153040-c74c464bbbf2/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
+golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
+golang.org/x/exp v0.0.0-20191002040644-a1355ae1e2c3/go.mod h1:NOZ3BPKG0ec/BKJQgnvsSFpcKLM5xXVWnvZS97DWHgE=
+golang.org/x/exp v0.0.0-20250218142911-aa4b98e5adaa h1:t2QcU6V556bFjYgu4L6C+6VrCPyJZ+eyRsABUPs1mz4=
+golang.org/x/exp v0.0.0-20250218142911-aa4b98e5adaa/go.mod h1:BHOTPb3L19zxehTsLoJXVaTktb06DFgmdW6Wb9s8jqk=
+golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs=
+golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
+golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
+golang.org/x/image v0.0.0-20190910094157-69e4b8554b2a/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
+golang.org/x/image v0.0.0-20200119044424-58c23975cae1/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
+golang.org/x/image v0.0.0-20200430140353-33d19683fad8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
+golang.org/x/image v0.0.0-20200618115811-c13761719519/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
+golang.org/x/image v0.0.0-20201208152932-35266b937fa6/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
+golang.org/x/image v0.0.0-20210216034530-4410531fe030/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
+golang.org/x/image v0.22.0 h1:UtK5yLUzilVrkjMAZAZ34DXGpASN8i8pj8g+O+yd10g=
+golang.org/x/image v0.22.0/go.mod h1:9hPFhljd4zZ1GNSIZJ49sqbp45GKK9t6w+iXvGqZUz4=
+golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
+golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
+golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
+golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
+golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o=
+golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=
+golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
+golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
+golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
+golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
+golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/net v0.38.0 h1:vRMAPTMaeGqVhG5QyLJHqNDwecKTomGeqbnfZyKlBI8=
+golang.org/x/net v0.38.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8=
+golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
+golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
+golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw=
+golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
+golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210304124612-50617c2ba197/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik=
+golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
+golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
+golang.org/x/term v0.30.0 h1:PQ39fJZ+mfadBm0y5WlL4vlM7Sx1Hgf13sMIY2+QS9Y=
+golang.org/x/term v0.30.0/go.mod h1:NYYFdzHoI5wRh/h5tDMdMqCqPJZEuNqVR5xJLd/n67g=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY=
+golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4=
+golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20190206041539-40960b6deb8e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
+golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
+golang.org/x/tools v0.0.0-20190927191325-030b2cf1153e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
+golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
+golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
+golang.org/x/tools v0.30.0 h1:BgcpHewrV5AUp2G9MebG4XPFI1E2W41zU1SaqVA9vJY=
+golang.org/x/tools v0.30.0/go.mod h1:c347cR/OJfw5TI+GfX7RUPNMdDRRbjvYTS0jPyvsVtY=
+golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
+golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+gonum.org/v1/gonum v0.0.0-20180816165407-929014505bf4/go.mod h1:Y+Yx5eoAFn32cQvJDxZx5Dpnq+c3wtXuadVZAcxbbBo=
+gonum.org/v1/gonum v0.8.2/go.mod h1:oe/vMfY3deqTw+1EZJhuvEW2iwGF1bW9wwu7XCu0+v0=
+gonum.org/v1/gonum v0.9.3/go.mod h1:TZumC3NeyVQskjXqmyWt4S3bINhy7B4eYwW69EbyX+0=
+gonum.org/v1/gonum v0.15.0 h1:2lYxjRbTYyxkJxlhC+LvJIx3SsANPdRybu1tGj9/OrQ=
+gonum.org/v1/gonum v0.15.0/go.mod h1:xzZVBJBtS+Mz4q0Yl2LJTk+OxOg4jiXZ7qBoM0uISGo=
+gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw=
+gonum.org/v1/plot v0.0.0-20190515093506-e2840ee46a6b/go.mod h1:Wt8AAjI+ypCyYX3nZBvf6cAIx93T+c/OS2HFAYskSZc=
+gonum.org/v1/plot v0.9.0/go.mod h1:3Pcqqmp6RHvJI72kgb8fThyUnav364FOsdDo2aGW5lY=
+google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
+google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
+google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
+google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
+google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
+google.golang.org/genproto v0.0.0-20210630183607-d20f26d13c79/go.mod h1:yiaVoXHpRzHGyxV3o4DktVWY4mSUErTKaeEOq6C3t3U=
+google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
+google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
+google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
+google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
+google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0=
+google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
+google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
+google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE=
+google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
+google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
+google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
+google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
+google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
+google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
+google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
+google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
+google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
+google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
+google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
+google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
+google.golang.org/protobuf v1.34.1 h1:9ddQBjfCyZPOHPUiPxpYESBLc+T8P3E+Vo4IbKZgFWg=
+google.golang.org/protobuf v1.34.1/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
+gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
+gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gorgonia.org/vecf32 v0.9.0 h1:PClazic1r+JVJ1dEzRXgeiVl4g1/Hf/w+wUSqnco1Xg=
+gorgonia.org/vecf32 v0.9.0/go.mod h1:NCc+5D2oxddRL11hd+pCB1PEyXWOyiQxfZ/1wwhOXCA=
+gorgonia.org/vecf64 v0.9.0 h1:bgZDP5x0OzBF64PjMGC3EvTdOoMEcmfAh1VCUnZFm1A=
+gorgonia.org/vecf64 v0.9.0/go.mod h1:hp7IOWCnRiVQKON73kkC/AUMtEXyf9kGlVrtPQ9ccVA=
+honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
+honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
+nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50=
+rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=
diff --git a/integration/README.md b/integration/README.md
new file mode 100644
index 0000000..e2bdd6b
--- /dev/null
+++ b/integration/README.md
@@ -0,0 +1,11 @@
+# Integration Tests
+
+This directory contains integration tests that exercise Ollama end-to-end to verify its behavior.
+
+By default, these tests are disabled so `go test ./...` exercises only unit tests. To run the integration tests, pass the `integration` build tag: `go test -tags=integration ./...`
+
+
+The integration tests have two modes of operation.
+
+1. By default, they will start the server on a random port, run the tests, and then shut down the server.
+2. If `OLLAMA_TEST_EXISTING` is set to a non-empty string, the tests will run against an existing running server, which can be remote (see the example below).
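+
+For example, the following invocations illustrate the two modes. The host address is illustrative, and it is assumed the API client honors the standard `OLLAMA_HOST` environment variable when pointing at an existing server:
+
+```shell
+# Mode 1 (default): the harness starts its own server on a random port, runs the tests, then shuts it down
+go test -tags=integration ./...
+
+# Mode 2: run against an already running (possibly remote) server
+OLLAMA_TEST_EXISTING=1 OLLAMA_HOST=127.0.0.1:11434 go test -tags=integration ./...
+```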
diff --git a/integration/api_test.go b/integration/api_test.go
new file mode 100644
index 0000000..d24f500
--- /dev/null
+++ b/integration/api_test.go
@@ -0,0 +1,412 @@
+//go:build integration
+
+package integration
+
+import (
+	"bytes"
+	"context"
+	"fmt"
+	"math/rand"
+	"strings"
+	"testing"
+	"time"
+
+	"github.com/ollama/ollama/api"
+)
+
+func TestAPIGenerate(t *testing.T) {
+	initialTimeout := 60 * time.Second
+	streamTimeout := 30 * time.Second
+	ctx, cancel := context.WithTimeout(context.Background(), 1*time.Minute)
+	defer cancel()
+	// Set up the test data
+	req := api.GenerateRequest{
+		Model:  smol,
+		Prompt: "why is the sky blue? be brief",
+		Options: map[string]interface{}{
+			"temperature": 0,
+			"seed":        123,
+		},
+	}
+	anyResp := []string{"rayleigh", "scattering"}
+
+	client, _, cleanup := InitServerConnection(ctx, t)
+	defer cleanup()
+	if err := PullIfMissing(ctx, client, req.Model); err != nil {
+		t.Fatalf("pull failed %s", err)
+	}
+
+	tests := []struct {
+		name   string
+		stream bool
+	}{
+		{
+			name:   "stream",
+			stream: true,
+		},
+		{
+			name:   "no_stream",
+			stream: false,
+		},
+	}
+
+	for _, test := range tests {
+		t.Run(test.name, func(t *testing.T) {
+			stallTimer := time.NewTimer(initialTimeout)
+			var buf bytes.Buffer
+			fn := func(response api.GenerateResponse) error {
+				// Fields that must always be present
+				if response.Model == "" {
+					t.Errorf("response missing model: %#v", response)
+				}
+				if response.Done {
+					// Required fields for final updates:
+					if response.DoneReason == "" && *req.Stream {
+						// TODO - is the lack of done reason on non-stream a bug?
+						t.Errorf("final response missing done_reason: %#v", response)
+					}
+					if response.Metrics.TotalDuration == 0 {
+						t.Errorf("final response missing total_duration: %#v", response)
+					}
+					if response.Metrics.LoadDuration == 0 {
+						t.Errorf("final response missing load_duration: %#v", response)
+					}
+					if response.Metrics.PromptEvalDuration == 0 {
+						t.Errorf("final response missing prompt_eval_duration: %#v", response)
+					}
+					if response.Metrics.EvalCount == 0 {
+						t.Errorf("final response missing eval_count: %#v", response)
+					}
+					if response.Metrics.EvalDuration == 0 {
+						t.Errorf("final response missing eval_duration: %#v", response)
+					}
+					if len(response.Context) == 0 {
+						t.Errorf("final response missing context: %#v", response)
+					}
+
+					// Note: caching can result in no prompt eval count, so this can't be verified reliably
+					// if response.Metrics.PromptEvalCount == 0 {
+					// 	t.Errorf("final response missing prompt_eval_count: %#v", response)
+					// }
+
+				} // else incremental response, nothing to check right now...
+				buf.Write([]byte(response.Response))
+				if !stallTimer.Reset(streamTimeout) {
+					return fmt.Errorf("stall was detected while streaming response, aborting")
+				}
+				return nil
+			}
+
+			done := make(chan int)
+			var genErr error
+			go func() {
+				req.Stream = &test.stream
+				req.Options["seed"] = rand.Int() // bust cache for prompt eval results
+				genErr = client.Generate(ctx, &req, fn)
+				done <- 0
+			}()
+
+			select {
+			case <-stallTimer.C:
+				if buf.Len() == 0 {
+					t.Errorf("generate never started. Timed out after %s", initialTimeout.String())
+				} else {
+					t.Errorf("generate stalled. Response so far: %s", buf.String())
+				}
+			case <-done:
+				if genErr != nil {
+					t.Fatalf("generate failed for model %s with prompt %q: %s", req.Model, req.Prompt, genErr)
+				}
+				// Verify the response contains the expected data
+				response := buf.String()
+				atLeastOne := false
+				for _, resp := range anyResp {
+					if strings.Contains(strings.ToLower(response), resp) {
+						atLeastOne = true
+						break
+					}
+				}
+				if !atLeastOne {
+					t.Errorf("none of %v found in %s", anyResp, response)
+				}
+			case <-ctx.Done():
+				t.Error("outer test context done while waiting for generate")
+			}
+		})
+	}
+
+	// Validate PS while we're at it...
+	resp, err := client.ListRunning(ctx)
+	if err != nil {
+		t.Fatalf("list models API error: %s", err)
+	}
+	if resp == nil || len(resp.Models) == 0 {
+		t.Fatalf("list models API returned empty list while model should still be loaded")
+	}
+	// Find the model we just loaded and verify some attributes
+	found := false
+	for _, model := range resp.Models {
+		if strings.Contains(model.Name, req.Model) {
+			found = true
+			if model.Model == "" {
+				t.Errorf("model field omitted: %#v", model)
+			}
+			if model.Size == 0 {
+				t.Errorf("size omitted: %#v", model)
+			}
+			if model.Digest == "" {
+				t.Errorf("digest omitted: %#v", model)
+			}
+			verifyModelDetails(t, model.Details)
+			var nilTime time.Time
+			if model.ExpiresAt == nilTime {
+				t.Errorf("expires_at omitted: %#v", model)
+			}
+			// SizeVRAM could be zero.
+		}
+	}
+	if !found {
+		t.Errorf("unable to locate running model: %#v", resp)
+	}
+}
+
+func TestAPIChat(t *testing.T) {
+	initialTimeout := 60 * time.Second
+	streamTimeout := 30 * time.Second
+	ctx, cancel := context.WithTimeout(context.Background(), 1*time.Minute)
+	defer cancel()
+	// Set up the test data
+	req := api.ChatRequest{
+		Model: smol,
+		Messages: []api.Message{
+			{
+				Role:    "user",
+				Content: "why is the sky blue?  be brief",
+			},
+		},
+		Options: map[string]interface{}{
+			"temperature": 0,
+			"seed":        123,
+		},
+	}
+	anyResp := []string{"rayleigh", "scattering"}
+
+	client, _, cleanup := InitServerConnection(ctx, t)
+	defer cleanup()
+	if err := PullIfMissing(ctx, client, req.Model); err != nil {
+		t.Fatalf("pull failed %s", err)
+	}
+
+	tests := []struct {
+		name   string
+		stream bool
+	}{
+		{
+			name:   "stream",
+			stream: true,
+		},
+		{
+			name:   "no_stream",
+			stream: false,
+		},
+	}
+
+	for _, test := range tests {
+		t.Run(test.name, func(t *testing.T) {
+			stallTimer := time.NewTimer(initialTimeout)
+			var buf bytes.Buffer
+			fn := func(response api.ChatResponse) error {
+				// Fields that must always be present
+				if response.Model == "" {
+					t.Errorf("response missing model: %#v", response)
+				}
+				if response.Done {
+					// Required fields for final updates:
+					var nilTime time.Time
+					if response.CreatedAt == nilTime {
+						t.Errorf("final response missing created_at: %#v", response)
+					}
+					if response.DoneReason == "" {
+						t.Errorf("final response missing done_reason: %#v", response)
+					}
+					if response.Metrics.TotalDuration == 0 {
+						t.Errorf("final response missing total_duration: %#v", response)
+					}
+					if response.Metrics.LoadDuration == 0 {
+						t.Errorf("final response missing load_duration: %#v", response)
+					}
+					if response.Metrics.PromptEvalDuration == 0 {
+						t.Errorf("final response missing prompt_eval_duration: %#v", response)
+					}
+					if response.Metrics.EvalCount == 0 {
+						t.Errorf("final response missing eval_count: %#v", response)
+					}
+					if response.Metrics.EvalDuration == 0 {
+						t.Errorf("final response missing eval_duration: %#v", response)
+					}
+
+					if response.Metrics.PromptEvalCount == 0 {
+						t.Errorf("final response missing prompt_eval_count: %#v", response)
+					}
+				} // else incremental response, nothing to check right now...
+				buf.Write([]byte(response.Message.Content))
+				if !stallTimer.Reset(streamTimeout) {
+					return fmt.Errorf("stall was detected while streaming response, aborting")
+				}
+				return nil
+			}
+
+			done := make(chan int)
+			var genErr error
+			go func() {
+				req.Stream = &test.stream
+				req.Options["seed"] = rand.Int() // bust cache for prompt eval results
+				genErr = client.Chat(ctx, &req, fn)
+				done <- 0
+			}()
+
+			select {
+			case <-stallTimer.C:
+				if buf.Len() == 0 {
+					t.Errorf("chat never started. Timed out after %s", initialTimeout.String())
+				} else {
+					t.Errorf("chat stalled. Response so far: %s", buf.String())
+				}
+			case <-done:
+				if genErr != nil {
+					t.Fatalf("chat failed for model %s with messages %v: %s", req.Model, req.Messages, genErr)
+				}
+				// Verify the response contains the expected data
+				response := buf.String()
+				atLeastOne := false
+				for _, resp := range anyResp {
+					if strings.Contains(strings.ToLower(response), resp) {
+						atLeastOne = true
+						break
+					}
+				}
+				if !atLeastOne {
+					t.Errorf("none of %v found in %s", anyResp, response)
+				}
+			case <-ctx.Done():
+				t.Error("outer test context done while waiting for chat")
+			}
+		})
+	}
+}
+
+func TestAPIListModels(t *testing.T) {
+	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+	defer cancel()
+	client, _, cleanup := InitServerConnection(ctx, t)
+	defer cleanup()
+
+	// Make sure we have at least one model so an empty list can be considered a failure
+	if err := PullIfMissing(ctx, client, smol); err != nil {
+		t.Fatalf("pull failed %s", err)
+	}
+
+	resp, err := client.List(ctx)
+	if err != nil {
+		t.Fatalf("unable to list models: %s", err)
+	}
+	if len(resp.Models) == 0 {
+		t.Fatalf("list should not be empty")
+	}
+	model := resp.Models[0]
+	if model.Name == "" {
+		t.Errorf("first model name empty: %#v", model)
+	}
+	var nilTime time.Time
+	if model.ModifiedAt == nilTime {
+		t.Errorf("first model modified_at empty: %#v", model)
+	}
+	if model.Size == 0 {
+		t.Errorf("first model size empty: %#v", model)
+	}
+	if model.Digest == "" {
+		t.Errorf("first model digest empty: %#v", model)
+	}
+	verifyModelDetails(t, model.Details)
+}
+
+func verifyModelDetails(t *testing.T, details api.ModelDetails) {
+	if details.Format == "" {
+		t.Errorf("first model details.format empty: %#v", details)
+	}
+	if details.Family == "" {
+		t.Errorf("first model details.family empty: %#v", details)
+	}
+	if details.ParameterSize == "" {
+		t.Errorf("first model details.parameter_size empty: %#v", details)
+	}
+	if details.QuantizationLevel == "" {
+		t.Errorf("first model details.quantization_level empty: %#v", details)
+	}
+}
+
+func TestAPIShowModel(t *testing.T) {
+	modelName := "llama3.2"
+	ctx, cancel := context.WithTimeout(context.Background(), 1*time.Minute)
+	defer cancel()
+	client, _, cleanup := InitServerConnection(ctx, t)
+	defer cleanup()
+
+	if err := PullIfMissing(ctx, client, modelName); err != nil {
+		t.Fatalf("pull failed %s", err)
+	}
+	resp, err := client.Show(ctx, &api.ShowRequest{Name: modelName})
+	if err != nil {
+		t.Fatalf("unable to show model: %s", err)
+	}
+	if resp.License == "" {
+		t.Errorf("%s missing license: %#v", modelName, resp)
+	}
+	if resp.Modelfile == "" {
+		t.Errorf("%s missing modelfile: %#v", modelName, resp)
+	}
+	if resp.Parameters == "" {
+		t.Errorf("%s missing parameters: %#v", modelName, resp)
+	}
+	if resp.Template == "" {
+		t.Errorf("%s missing template: %#v", modelName, resp)
+	}
+	// llama3 omits system
+	verifyModelDetails(t, resp.Details)
+	// llama3 omits messages
+	if len(resp.ModelInfo) == 0 {
+		t.Errorf("%s missing model_info: %#v", modelName, resp)
+	}
+	// llama3 omits projectors
+	var nilTime time.Time
+	if resp.ModifiedAt == nilTime {
+		t.Errorf("%s missing modified_at: %#v", modelName, resp)
+	}
+}
+
+func TestAPIEmbeddings(t *testing.T) {
+	ctx, cancel := context.WithTimeout(context.Background(), 1*time.Minute)
+	defer cancel()
+	client, _, cleanup := InitServerConnection(ctx, t)
+	defer cleanup()
+	req := api.EmbeddingRequest{
+		Model:  "orca-mini",
+		Prompt: "why is the sky blue?",
+		Options: map[string]interface{}{
+			"temperature": 0,
+			"seed":        123,
+		},
+	}
+
+	if err := PullIfMissing(ctx, client, req.Model); err != nil {
+		t.Fatalf("pull failed %s", err)
+	}
+
+	resp, err := client.Embeddings(ctx, &req)
+	if err != nil {
+		t.Fatalf("embeddings call failed %s", err)
+	}
+	if len(resp.Embedding) == 0 {
+		t.Errorf("zero length embedding response")
+	}
+}
diff --git a/integration/basic_test.go b/integration/basic_test.go
new file mode 100644
index 0000000..13c2f22
--- /dev/null
+++ b/integration/basic_test.go
@@ -0,0 +1,106 @@
+//go:build integration
+
+package integration
+
+import (
+	"context"
+	"log/slog"
+	"os"
+	"runtime"
+	"testing"
+	"time"
+
+	"github.com/ollama/ollama/api"
+	"github.com/stretchr/testify/require"
+)
+
+func TestBlueSky(t *testing.T) {
+	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
+	defer cancel()
+	// Set up the test data
+	req := api.GenerateRequest{
+		Model:  smol,
+		Prompt: "why is the sky blue?",
+		Stream: &stream,
+		Options: map[string]any{
+			"temperature": 0,
+			"seed":        123,
+		},
+	}
+	GenerateTestHelper(ctx, t, req, []string{"rayleigh", "scattering"})
+}
+
+func TestUnicode(t *testing.T) {
+	skipUnderMinVRAM(t, 6)
+	ctx, cancel := context.WithTimeout(context.Background(), 3*time.Minute)
+	defer cancel()
+	// Set up the test data
+	req := api.GenerateRequest{
+		// DeepSeek has a Unicode tokenizer regex, making it a unicode torture test
+		Model:  "deepseek-coder-v2:16b-lite-instruct-q2_K",
+		Prompt: "天空为什么是蓝色的?",
+		Stream: &stream,
+		Options: map[string]any{
+			"temperature": 0,
+			"seed":        123,
+			// Workaround deepseek context shifting bug
+			"num_ctx":     8192,
+			"num_predict": 2048,
+		},
+	}
+	client, _, cleanup := InitServerConnection(ctx, t)
+	defer cleanup()
+	require.NoError(t, PullIfMissing(ctx, client, req.Model))
+	DoGenerate(ctx, t, client, req, []string{"散射", "频率"}, 120*time.Second, 120*time.Second)
+}
+
+func TestExtendedUnicodeOutput(t *testing.T) {
+	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
+	defer cancel()
+	// Set up the test data
+	req := api.GenerateRequest{
+		Model:  "gemma2:2b",
+		Prompt: "Output some smiley face emoji",
+		Stream: &stream,
+		Options: map[string]any{
+			"temperature": 0,
+			"seed":        123,
+		},
+	}
+	client, _, cleanup := InitServerConnection(ctx, t)
+	defer cleanup()
+	require.NoError(t, PullIfMissing(ctx, client, req.Model))
+	DoGenerate(ctx, t, client, req, []string{"😀", "😊", "😁", "😂", "😄", "😃"}, 120*time.Second, 120*time.Second)
+}
+
+func TestUnicodeModelDir(t *testing.T) {
+	// This is only useful for Windows with utf-16 characters, so skip this test for other platforms
+	if runtime.GOOS != "windows" {
+		t.Skip("Unicode test only applicable to windows")
+	}
+	// Only works for local testing
+	if os.Getenv("OLLAMA_TEST_EXISTING") != "" {
+		t.Skip("TestUnicodeModelDir only works for local testing, skipping")
+	}
+
+	modelDir, err := os.MkdirTemp("", "ollama_埃")
+	require.NoError(t, err)
+	defer os.RemoveAll(modelDir)
+	slog.Info("unicode", "OLLAMA_MODELS", modelDir)
+
+	t.Setenv("OLLAMA_MODELS", modelDir)
+
+	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
+	defer cancel()
+
+	req := api.GenerateRequest{
+		Model:  smol,
+		Prompt: "why is the sky blue?",
+		Stream: &stream,
+		Options: map[string]any{
+			"temperature": 0,
+			"seed":        123,
+		},
+	}
+	GenerateTestHelper(ctx, t, req, []string{"rayleigh", "scattering"})
+}
diff --git a/integration/concurrency_test.go b/integration/concurrency_test.go
new file mode 100644
index 0000000..dbf1e6f
--- /dev/null
+++ b/integration/concurrency_test.go
@@ -0,0 +1,274 @@
+//go:build integration
+
+package integration
+
+import (
+	"context"
+	"log/slog"
+	"os"
+	"strconv"
+	"sync"
+	"testing"
+	"time"
+
+	"github.com/stretchr/testify/require"
+
+	"github.com/ollama/ollama/api"
+	"github.com/ollama/ollama/format"
+)
+
+func TestMultiModelConcurrency(t *testing.T) {
+	var (
+		req = [2]api.GenerateRequest{
+			{
+				Model:     "llama3.2:1b",
+				Prompt:    "why is the ocean blue?",
+				Stream:    &stream,
+				KeepAlive: &api.Duration{Duration: 10 * time.Second},
+				Options: map[string]any{
+					"seed":        42,
+					"temperature": 0.0,
+				},
+			}, {
+				Model:     "tinydolphin",
+				Prompt:    "what is the origin of the us thanksgiving holiday?",
+				Stream:    &stream,
+				KeepAlive: &api.Duration{Duration: 10 * time.Second},
+				Options: map[string]any{
+					"seed":        42,
+					"temperature": 0.0,
+				},
+			},
+		}
+		resp = [2][]string{
+			{"sunlight"},
+			{"england", "english", "massachusetts", "pilgrims", "british", "festival"},
+		}
+	)
+	var wg sync.WaitGroup
+	wg.Add(len(req))
+	ctx, cancel := context.WithTimeout(context.Background(), time.Second*240)
+	defer cancel()
+
+	client, _, cleanup := InitServerConnection(ctx, t)
+	defer cleanup()
+
+	for i := 0; i < len(req); i++ {
+		require.NoError(t, PullIfMissing(ctx, client, req[i].Model))
+	}
+
+	for i := 0; i < len(req); i++ {
+		go func(i int) {
+			defer wg.Done()
+			// Note: CPU based inference can crawl so don't give up too quickly
+			DoGenerate(ctx, t, client, req[i], resp[i], 90*time.Second, 30*time.Second)
+		}(i)
+	}
+	wg.Wait()
+}
+
+func TestIntegrationConcurrentPredict(t *testing.T) {
+	req, resp := GenerateRequests()
+	reqLimit := len(req)
+	iterLimit := 5
+
+	if s := os.Getenv("OLLAMA_MAX_VRAM"); s != "" {
+		maxVram, err := strconv.ParseUint(s, 10, 64)
+		require.NoError(t, err)
+		// Don't hammer on small VRAM cards...
+		if maxVram < 4*format.GibiByte {
+			reqLimit = min(reqLimit, 2)
+			iterLimit = 2
+		}
+	}
+
+	ctx, cancel := context.WithTimeout(context.Background(), 9*time.Minute)
+	defer cancel()
+	client, _, cleanup := InitServerConnection(ctx, t)
+	defer cleanup()
+
+	// Get the server running (if applicable) and warm the model up with a single initial request
+	DoGenerate(ctx, t, client, req[0], resp[0], 60*time.Second, 10*time.Second)
+
+	var wg sync.WaitGroup
+	wg.Add(reqLimit)
+	for i := 0; i < reqLimit; i++ {
+		go func(i int) {
+			defer wg.Done()
+			for j := 0; j < iterLimit; j++ {
+				slog.Info("Starting", "req", i, "iter", j)
+				// On slower GPUs it can take a while to process the concurrent requests
+				// so we allow a much longer initial timeout
+				DoGenerate(ctx, t, client, req[i], resp[i], 120*time.Second, 20*time.Second)
+			}
+		}(i)
+	}
+	wg.Wait()
+}
+
+// Stress the system if we know how much VRAM it has, and attempt to load more models than will fit
+func TestMultiModelStress(t *testing.T) {
+	s := os.Getenv("OLLAMA_MAX_VRAM") // TODO - discover actual VRAM
+	if s == "" {
+		t.Skip("OLLAMA_MAX_VRAM not specified, can't pick the right models for the stress test")
+	}
+
+	maxVram, err := strconv.ParseUint(s, 10, 64)
+	if err != nil {
+		t.Fatal(err)
+	}
+	if maxVram < 2*format.GibiByte {
+		t.Skip("VRAM less than 2G, skipping model stress tests")
+	}
+
+	type model struct {
+		name string
+		size uint64 // Approximate amount of VRAM they typically use when fully loaded in VRAM
+	}
+
+	smallModels := []model{
+		{
+			name: "llama3.2:1b",
+			size: 2876 * format.MebiByte,
+		},
+		{
+			name: "phi",
+			size: 2616 * format.MebiByte,
+		},
+		{
+			name: "gemma:2b",
+			size: 2364 * format.MebiByte,
+		},
+		{
+			name: "stable-code:3b",
+			size: 2608 * format.MebiByte,
+		},
+		{
+			name: "starcoder2:3b",
+			size: 2166 * format.MebiByte,
+		},
+	}
+	mediumModels := []model{
+		{
+			name: "llama2",
+			size: 5118 * format.MebiByte,
+		},
+		{
+			name: "mistral",
+			size: 4620 * format.MebiByte,
+		},
+		{
+			name: "orca-mini:7b",
+			size: 5118 * format.MebiByte,
+		},
+		{
+			name: "dolphin-mistral",
+			size: 4620 * format.MebiByte,
+		},
+		{
+			name: "gemma:7b",
+			size: 5000 * format.MebiByte,
+		},
+		{
+			name: "codellama:7b",
+			size: 5118 * format.MebiByte,
+		},
+	}
+
+	// These seem to be too slow to be useful...
+	// largeModels := []model{
+	// 	{
+	// 		name: "llama2:13b",
+	// 		size: 7400 * format.MebiByte,
+	// 	},
+	// 	{
+	// 		name: "codellama:13b",
+	// 		size: 7400 * format.MebiByte,
+	// 	},
+	// 	{
+	// 		name: "orca-mini:13b",
+	// 		size: 7400 * format.MebiByte,
+	// 	},
+	// 	{
+	// 		name: "gemma:7b",
+	// 		size: 5000 * format.MebiByte,
+	// 	},
+	// 	{
+	// 		name: "starcoder2:15b",
+	// 		size: 9100 * format.MebiByte,
+	// 	},
+	// }
+
+	var chosenModels []model
+	switch {
+	case maxVram < 10000*format.MebiByte:
+		slog.Info("selecting small models")
+		chosenModels = smallModels
+	// case maxVram < 30000*format.MebiByte:
+	default:
+		slog.Info("selecting medium models")
+		chosenModels = mediumModels
+		// default:
+		// 	slog.Info("selecting large models")
+		// 	chosenModels = largeModels
+	}
+
+	req, resp := GenerateRequests()
+
+	for i := range req {
+		if i >= len(chosenModels) {
+			break
+		}
+		req[i].Model = chosenModels[i].name
+	}
+
+	ctx, cancel := context.WithTimeout(context.Background(), 15*time.Minute) // TODO baseline -- 10m too short
+	defer cancel()
+	client, _, cleanup := InitServerConnection(ctx, t)
+	defer cleanup()
+
+	// Make sure all the models are pulled before we get started
+	for _, r := range req {
+		require.NoError(t, PullIfMissing(ctx, client, r.Model))
+	}
+
+	var wg sync.WaitGroup
+	consumed := uint64(256 * format.MebiByte) // Assume some baseline usage
+	for i := 0; i < len(req); i++ {
+		// Always get at least 2 models, but don't overshoot VRAM too much or we'll take too long
+		if i > 1 && consumed > maxVram {
+			slog.Info("achieved target vram exhaustion", "count", i, "vram", format.HumanBytes2(maxVram), "models", format.HumanBytes2(consumed))
+			break
+		}
+		consumed += chosenModels[i].size
+		slog.Info("target vram", "count", i, "vram", format.HumanBytes2(maxVram), "models", format.HumanBytes2(consumed))
+
+		wg.Add(1)
+		go func(i int) {
+			defer wg.Done()
+			for j := 0; j < 3; j++ {
+				slog.Info("Starting", "req", i, "iter", j, "model", req[i].Model)
+				DoGenerate(ctx, t, client, req[i], resp[i], 120*time.Second, 5*time.Second)
+			}
+		}(i)
+	}
+	go func() {
+		for {
+			time.Sleep(2 * time.Second)
+			select {
+			case <-ctx.Done():
+				return
+			default:
+				models, err := client.ListRunning(ctx)
+				if err != nil {
+					slog.Warn("failed to list running models", "error", err)
+					continue
+				}
+				for _, m := range models.Models {
+					slog.Info("loaded model snapshot", "model", m)
+				}
+			}
+		}
+	}()
+	wg.Wait()
+}
diff --git a/integration/context_test.go b/integration/context_test.go
new file mode 100644
index 0000000..409d913
--- /dev/null
+++ b/integration/context_test.go
@@ -0,0 +1,65 @@
+//go:build integration
+
+package integration
+
+import (
+	"context"
+	"testing"
+	"time"
+
+	"github.com/ollama/ollama/api"
+)
+
+func TestLongInputContext(t *testing.T) {
+	// Setting NUM_PARALLEL to 1 ensures the allocated context is exactly what
+	// we asked for and there is nothing extra that we could spill over into
+	t.Setenv("OLLAMA_NUM_PARALLEL", "1")
+
+	// Longer needed for small footprint GPUs
+	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)
+	defer cancel()
+	// Set up the test data
+	req := api.GenerateRequest{
+		Model:  "llama2",
+		Prompt: "Oh, don’t speak to me of Austria. Perhaps I don’t understand things, but Austria never has wished, and does not wish, for war. She is betraying us! Russia alone must save Europe. Our gracious sovereign recognizes his high vocation and will be true to it. That is the one thing I have faith in! Our good and wonderful sovereign has to perform the noblest role on earth, and he is so virtuous and noble that God will not forsake him. He will fulfill his vocation and crush the hydra of revolution, which has become more terrible than ever in the person of this murderer and villain! We alone must avenge the blood of the just one.... Whom, I ask you, can we rely on?... England with her commercial spirit will not and cannot understand the Emperor Alexander’s loftiness of soul. She has refused to evacuate Malta. She wanted to find, and still seeks, some secret motive in our actions. What answer did Novosíltsev get? None. The English have not understood and cannot understand the self-abnegation of our Emperor who wants nothing for himself, but only desires the good of mankind. And what have they promised? Nothing! And what little they have promised they will not perform! Prussia has always declared that Buonaparte is invincible, and that all Europe is powerless before him.... And I don’t believe a word that Hardenburg says, or Haugwitz either. This famous Prussian neutrality is just a trap. I have faith only in God and the lofty destiny of our adored monarch. He will save Europe! What country is this referring to?",
+		Stream: &stream,
+		Options: map[string]any{
+			"temperature": 0,
+			"seed":        123,
+			"num_ctx":     128,
+		},
+	}
+	client, _, cleanup := InitServerConnection(ctx, t)
+	defer cleanup()
+	if err := PullIfMissing(ctx, client, req.Model); err != nil {
+		t.Fatalf("PullIfMissing failed: %v", err)
+	}
+	DoGenerate(ctx, t, client, req, []string{"russia", "germany", "france", "england", "austria", "prussia"}, 120*time.Second, 10*time.Second)
+}
+
+func TestContextExhaustion(t *testing.T) {
+	// Setting NUM_PARALLEL to 1 ensures the allocated context is exactly what
+	// we asked for and there is nothing extra that we could spill over into
+	t.Setenv("OLLAMA_NUM_PARALLEL", "1")
+
+	// Longer needed for small footprint GPUs
+	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)
+	defer cancel()
+	// Set up the test data
+	req := api.GenerateRequest{
+		Model:  "llama2",
+		Prompt: "Write me a story with a ton of emojis?",
+		Stream: &stream,
+		Options: map[string]any{
+			"temperature": 0,
+			"seed":        123,
+			"num_ctx":     128,
+		},
+	}
+	client, _, cleanup := InitServerConnection(ctx, t)
+	defer cleanup()
+	if err := PullIfMissing(ctx, client, req.Model); err != nil {
+		t.Fatalf("PullIfMissing failed: %v", err)
+	}
+	DoGenerate(ctx, t, client, req, []string{"once", "upon", "lived"}, 120*time.Second, 10*time.Second)
+}
diff --git a/integration/embed_test.go b/integration/embed_test.go
new file mode 100644
index 0000000..09369db
--- /dev/null
+++ b/integration/embed_test.go
@@ -0,0 +1,247 @@
+//go:build integration
+
+package integration
+
+import (
+	"context"
+	"math"
+	"testing"
+	"time"
+
+	"github.com/ollama/ollama/api"
+)
+
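+// dotProduct returns the sum of element-wise products of two equal-length vectors.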
+func dotProduct[V float32 | float64](v1, v2 []V) V {
+	var result V = 0
+	for i := 0; i < len(v1); i++ {
+		result += v1[i] * v2[i]
+	}
+	return result
+}
+
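+// magnitude returns the Euclidean (L2) norm of a vector.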
+func magnitude[V float32 | float64](v []V) V {
+	var result V = 0
+	for _, val := range v {
+		result += val * val
+	}
+	return V(math.Sqrt(float64(result)))
+}
+
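+// cosineSimilarity returns the cosine of the angle between two vectors: 1.0
+// means they point in the same direction. The embedding tests below accept a
+// similarity of at least 0.99 rather than requiring exact equality, since
+// embedding values can differ slightly between runs and backends.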
+func cosineSimilarity[V float32 | float64](v1, v2 []V) V {
+	return dotProduct(v1, v2) / (magnitude(v1) * magnitude(v2))
+}
+
+func TestAllMiniLMEmbeddings(t *testing.T) {
+	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
+	defer cancel()
+	client, _, cleanup := InitServerConnection(ctx, t)
+	defer cleanup()
+
+	req := api.EmbeddingRequest{
+		Model:  "all-minilm",
+		Prompt: "why is the sky blue?",
+	}
+
+	res, err := embeddingTestHelper(ctx, client, t, req)
+
+	if err != nil {
+		t.Fatalf("error: %v", err)
+	}
+
+	if len(res.Embedding) != 384 {
+		t.Fatalf("expected 384 floats, got %d", len(res.Embedding))
+	}
+
+	expected := []float64{
+		0.06642947345972061, -0.01160573959350586, 0.3302811086177826, 0.309552937746048, 0.36223655939102173, 0.05672447010874748, 0.6955016851425171, -0.17069467902183533, 0.8547305464744568, 0.21076075732707977, -0.29339903593063354, -0.05926772207021713, -0.003363408148288727, -0.4204462468624115, -0.1061280220746994, 0.30754348635673523, -0.14551642537117004, -1.0430994033813477, -0.4805174171924591, -0.40448474884033203, -0.4345352053642273, 0.3573606014251709, -0.4098161458969116, 0.25664326548576355, -0.3021087646484375, 0.36236199736595154, -0.23262615501880646, 0.08319848775863647, 0.28042519092559814, -0.052289899438619614, -0.12552005052566528, 0.402255117893219, 0.24357250332832336, 0.08881516754627228, -0.17023836076259613, -0.2868475615978241, 0.4790303707122803, -0.3199635446071625, 0.02826809138059616, -0.19417747855186462, -0.19217649102210999, -0.21705707907676697, -0.1210065633058548, 0.10262420773506165, -0.07726037502288818, 0.10094445943832397, -0.06194962561130524, 0.1712605208158493, 0.628441333770752, -0.10222385078668594, -0.16214007139205933, 0.059920795261859894, -0.5053377151489258, 0.10545563697814941, 0.32686805725097656, 0.7650210857391357, 0.006465774029493332, -0.13403119146823883, 0.6090353727340698, 0.05603303387761116, -0.37635889649391174, 0.45424884557724, -0.5053073763847351, 0.4572359323501587, 0.6084011197090149, -0.3659921884536743, -0.3536888360977173, 0.05569244921207428, -0.4166066646575928, -0.43796032667160034, -0.16600576043128967, 0.12460685521364212, 0.40493422746658325, -0.18632565438747406, 0.2390710711479187, 0.007283639162778854, 0.4001992344856262, -0.4455743134021759, -0.05360018089413643, -0.08401738107204437, 0.2041706144809723, -0.42083415389060974, -0.491476833820343, 0.7860275506973267, 0.08280622214078903, 0.4309011697769165, 0.09778489172458649, 0.3392091989517212, -0.5618907809257507, 0.06766007840633392, -0.05127308890223503, -0.23472431302070618, -0.7611223459243774, -0.20227840542793274, -0.5491426587104797, 0.09030043333768845, 0.37326449155807495, -0.2696656584739685, 0.2814738154411316, 0.1461343765258789, 0.309052437543869, -0.3387487828731537, 0.1990429162979126, 0.0474909171462059, -0.02756538614630699, -0.20544570684432983, 0.5137258768081665, 0.22562497854232788, 0.40487033128738403, 0.04954294115304947, -0.23911823332309723, -0.5578761696815491, 0.14376327395439148, -0.12795016169548035, -0.26285219192504883, 0.3614377975463867, -0.22225692868232727, 0.11940789222717285, -0.6961514353752136, -0.3324243426322937, -0.07613810151815414, 0.24946099519729614, 0.1462409496307373, 0.5309336185455322, 0.051560595631599426, -0.11104149371385574, -0.39189594984054565, -4.767201176712463e-32, 0.892546534538269, -0.07396792620420456, 0.6088366508483887, 0.23729179799556732, 0.2614588737487793, -0.3626874089241028, -0.23131835460662842, -0.024579279124736786, -0.12901946902275085, -0.2306443750858307, -0.0376533679664135, -0.09649471938610077, -0.16013199090957642, -0.31914401054382324, 0.3151017129421234, -0.11264121532440186, -0.4020160734653473, 0.039211247116327286, -0.5478582978248596, 0.5563258528709412, -0.6903842091560364, 0.2746567130088806, -0.24196553230285645, -0.053318753838539124, -0.18611761927604675, -0.28490889072418213, 0.237456813454628, 0.4946249723434448, 0.37237465381622314, 0.07815749943256378, 0.6494859457015991, 0.6915512084960938, -0.14422327280044556, 0.30338582396507263, -0.17378094792366028, -0.33589833974838257, -0.09702004492282867, -0.04210608825087547, -0.566387414932251, 0.18866634368896484, 
-0.3533778488636017, 0.37286972999572754, -0.39420801401138306, 0.0818595215678215, 0.436712384223938, -0.08886678516864777, 0.2527940273284912, -0.5864061117172241, -0.37891554832458496, 0.21103361248970032, -0.2275354266166687, 0.1558678150177002, 0.09536703675985336, -0.27437490224838257, 0.4484926164150238, 0.20584626495838165, 0.45972558856010437, -0.231113001704216, -0.021833699196577072, 0.3253912925720215, -0.08802174031734467, -0.023067735135555267, 0.33492740988731384, 0.5189340114593506, 0.2481488585472107, -0.07638847082853317, 0.25147074460983276, 0.2771286964416504, -0.08443005383014679, -0.5207436084747314, 0.05951530486345291, 0.08816319704055786, 0.15935833752155304, 0.0644921213388443, -0.07194079458713531, -0.5383226871490479, 0.17800968885421753, -0.195652037858963, -0.028597159311175346, 0.08582349121570587, -0.23225288093090057, -0.12984338402748108, 0.3651025593280792, -0.4039592146873474, -0.3628298342227936, 0.08263863623142242, -0.12648534774780273, -0.08284908533096313, -0.1042669266462326, -0.4579034447669983, -0.2961195111274719, -0.32282471656799316, 0.3182551860809326, -0.6890494227409363, -0.7114676237106323, 2.3665072841905432e-32, -0.0030965525656938553, -0.5696439146995544, -0.5794872045516968, 0.04729880392551422, -0.048917483538389206, -0.10963250696659088, 0.298623263835907, 0.4452674388885498, -0.2828809320926666, 0.5696343183517456, 0.3004711866378784, 0.44842660427093506, 0.06550214439630508, -0.020054858177900314, 0.385932058095932, -0.23460465669631958, 0.23865005373954773, 0.4363722801208496, -0.24931970238685608, -0.41073542833328247, -0.2937365770339966, 0.5095447301864624, 0.2864843010902405, -0.14028388261795044, -0.14269764721393585, 0.4107881486415863, -0.2581801116466522, 0.18544888496398926, -0.08612997084856033, 0.33715111017227173, -0.24288496375083923, 0.3599962592124939, -0.43829354643821716, 0.15094976127147675, 0.03177203983068466, 0.5965112447738647, 0.03364168107509613, -0.5481097102165222, -0.363423228263855, 0.4825053811073303, -0.7288467288017273, -0.13361915946006775, 0.7423286437988281, -0.3515661358833313, -0.37989044189453125, -0.1576842963695526, 0.3734908998012543, 0.8393698930740356, 0.23719121515750885, -0.28990280628204346, 0.11215505003929138, -0.16382968425750732, 0.47951722145080566, 0.28471529483795166, 0.5308315753936768, -0.1286555975675583, -0.22689077258110046, 0.6377706527709961, 0.34224453568458557, 0.07091143727302551, 0.26538553833961487, 0.014475930482149124, -0.050034329295158386, 0.011025313287973404, 0.09357182681560516, 0.1345357596874237, -0.1523902863264084, 0.14176052808761597, -0.0609259307384491, -0.3332745134830475, -0.1072426363825798, -0.5933747291564941, -0.40028926730155945, 0.5343422293663025, 0.016202416270971298, 0.27436596155166626, 0.28844428062438965, -0.1660136878490448, -0.6286065578460693, 0.5850632190704346, -0.6491153836250305, -0.03207448124885559, 0.23312292993068695, 0.09339666366577148, -0.42595869302749634, -0.5011518001556396, 0.08187201619148254, -0.3312609791755676, -0.3677852153778076, -0.3758619427680969, -0.12195874005556107, -0.014479270204901695, -0.014539752155542374, 0.23270025849342346, -0.3609132170677185, -9.438503667524856e-8, -0.05230816453695297, 0.17612962424755096, 0.01489749364554882, 0.06601762771606445, -0.14300350844860077, -0.1422577053308487, 0.7347333431243896, 0.030603498220443726, 0.24959787726402283, 0.026135217398405075, -0.4412609338760376, -0.18663707375526428, -0.29235413670539856, 0.4696626365184784, 0.12353914976119995, -0.3236965537071228, 
-0.6856554746627808, -0.28768694400787354, 0.0671629011631012, 0.27566438913345337, -0.0893339067697525, -0.22328855097293854, -0.16536207497119904, -0.08968719840049744, 0.022607458755373955, 0.21818216145038605, -0.14408129453659058, 0.14458191394805908, 0.4712568521499634, 0.13527995347976685, 0.16118602454662323, 0.23675017058849335, -0.0062652211636304855, -0.4045848250389099, -0.5631943345069885, 0.04897312819957733, -0.2558498978614807, 0.5269845128059387, -0.16870160400867462, -0.39874112606048584, 0.3996037244796753, 0.5432316660881042, -0.3740345239639282, 0.031965695321559906, 0.29769593477249146, 0.1568443477153778, 0.287019282579422, 0.6005253791809082, -0.33905476331710815, -0.07407552748918533, -0.4541633129119873, 0.047827333211898804, 0.4803982973098755, -0.2860602140426636, 0.17097190022468567, -0.7525586485862732, -0.06290972977876663, 0.14645379781723022, 0.176426962018013, 0.024587953463196754, 0.105128213763237, 0.023733407258987427, -0.1363760083913803, 0.22127331793308258,
+	}
+	sim := cosineSimilarity(res.Embedding, expected)
+	if sim < 0.99 {
+		t.Fatalf("expected %v, got %v (similarity: %f)", expected[0:5], res.Embedding[0:5], sim)
+	}
+}
+
+func TestAllMiniLMEmbed(t *testing.T) {
+	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
+	defer cancel()
+	client, _, cleanup := InitServerConnection(ctx, t)
+	defer cleanup()
+
+	req := api.EmbedRequest{
+		Model: "all-minilm",
+		Input: "why is the sky blue?",
+	}
+
+	res, err := embedTestHelper(ctx, client, t, req)
+
+	if err != nil {
+		t.Fatalf("error: %v", err)
+	}
+
+	if len(res.Embeddings) != 1 {
+		t.Fatalf("expected 1 embedding, got %d", len(res.Embeddings))
+	}
+
+	if len(res.Embeddings[0]) != 384 {
+		t.Fatalf("expected 384 floats, got %d", len(res.Embeddings[0]))
+	}
+
+	expected := []float32{
+		0.010071031, -0.0017594865, 0.050072223, 0.046929732, 0.05491682, 0.008599705, 0.105441436, -0.025878143, 0.1295813, 0.031952355, -0.04448072, -0.0089852745, -0.000509909, -0.06374169, -0.016089523, 0.04662509, -0.022060998, -0.15813895, -0.072848774, -0.061321855, -0.065877646, 0.054177605, -0.06213012, 0.038908366, -0.04580116, 0.05493584, -0.035267256, 0.012613296, 0.04251382, -0.007927403, -0.01902945, 0.060983833, 0.036926776, 0.013464811, -0.025808964, -0.043487485, 0.072623335, -0.04850803, 0.00428558, -0.02943825, -0.02913489, -0.03290691, -0.018345183, 0.0155583285, -0.011713048, 0.01530367, -0.009391865, 0.025963927, 0.09527476, -0.015497632, -0.024581224, 0.009084283, -0.07661165, 0.015987588, 0.049554788, 0.115980916, 0.0009802427, -0.02031978, 0.09233272, 0.00849488, -0.05705784, 0.068866335, -0.076607056, 0.06931919, 0.09223656, -0.055486195, -0.053620946, 0.008443246, -0.06315959, -0.066396914, -0.02516728, 0.018891005, 0.061389998, -0.028247874, 0.036244337, 0.0011042351, 0.06067215, -0.06755123, -0.008126048, -0.012737444, 0.030953258, -0.06380051, -0.07451028, 0.1191656, 0.012553826, 0.06532671, 0.014824665, 0.051425762, -0.08518537, 0.010257597, -0.0077732494, -0.035585348, -0.115389846, -0.03066639, -0.0832527, 0.013689985, 0.056588713, -0.040882625, 0.042672798, 0.022154681, 0.04685385, -0.05135596, 0.030175874, 0.007199854, -0.0041790465, -0.031146567, 0.07788334, 0.034205843, 0.06138031, 0.007510951, -0.036251485, -0.08457674, 0.021795211, -0.019397866, -0.03984967, 0.054795727, -0.033695232, 0.018102817, -0.10553994, -0.050397146, -0.011542906, 0.0378195, 0.022170838, 0.08049212, 0.007816837, -0.01683443, -0.059413332, -7.227309e-33, 0.13531439, -0.011213897, 0.0923026, 0.03597459, 0.039638437, -0.054985173, -0.03506899, -0.0037263383, -0.01955998, -0.034966808, -0.0057084337, -0.014629069, -0.024276787, -0.048383784, 0.04777095, -0.017076956, -0.06094759, 0.0059446157, -0.083057985, 0.084341705, -0.1046656, 0.041639294, -0.03668315, -0.008083383, -0.028216336, -0.04319357, 0.035999607, 0.07498755, 0.05645381, 0.011849057, 0.09846523, 0.10484252, -0.021864949, 0.045994766, -0.026346037, -0.05092382, -0.014708711, -0.0063834875, -0.085867085, 0.028602734, -0.0535738, 0.056528863, -0.059763853, 0.012410302, 0.06620772, -0.013472636, 0.038324803, -0.08890202, -0.05744544, 0.03199372, -0.034495477, 0.02363032, 0.014458106, -0.04159657, 0.06799366, 0.031207295, 0.069696635, -0.035037853, -0.0033100948, 0.0493309, -0.0133445235, -0.0034971808, 0.050776623, 0.078672916, 0.037620574, -0.011580864, 0.03812419, 0.04201406, -0.012800006, -0.07894726, 0.00902281, 0.013365969, 0.024159499, 0.009777319, -0.010906574, -0.08161233, 0.026987134, -0.0296618, -0.004335468, 0.013011258, -0.035210665, -0.019684888, 0.055351324, -0.06124218, -0.055006765, 0.012528419, -0.019175794, -0.012560324, -0.015807373, -0.06942039, -0.044893157, -0.048941795, 0.048249032, -0.10446324, -0.10786195, 3.58774e-33, -0.0004694524, -0.08636079, -0.087853074, 0.0071707284, -0.007416128, -0.01662082, 0.045272738, 0.06750471, -0.042886123, 0.08635933, 0.04555289, 0.06798365, 0.009930444, -0.003040414, 0.058509175, -0.035567205, 0.036180507, 0.06615616, -0.03779808, -0.062269486, -0.044531893, 0.07724946, 0.04343241, -0.021267718, -0.021633657, 0.06227748, -0.03914136, 0.028114952, -0.013057723, 0.051113747, -0.036822543, 0.054577183, -0.06644743, 0.022884717, 0.0048167957, 0.09043401, 0.0051002423, -0.083096094, -0.055096727, 0.07315016, -0.11049671, -0.020257315, 0.11254063, -0.053299136, -0.057593238, 
-0.023905706, 0.056623034, 0.12725255, 0.03595934, -0.043950673, 0.017003251, -0.024837377, 0.07269714, 0.043164223, 0.08047665, -0.019504813, -0.034397744, 0.096689135, 0.051885936, 0.010750518, 0.04023374, 0.0021946214, -0.0075854477, 0.0016714911, 0.014185944, 0.020396275, -0.023103109, 0.021491585, -0.009236667, -0.050526038, -0.016258504, -0.0899585, -0.0606858, 0.08100888, 0.0024563652, 0.041595213, 0.043729555, -0.025168482, -0.09529981, 0.088698424, -0.09840905, -0.0048626475, 0.03534257, 0.014159388, -0.06457741, -0.07597705, 0.012412196, -0.050220776, -0.055758025, -0.0569825, -0.018489538, -0.0021951278, -0.002204297, 0.03527849, -0.0547162, -1.430923e-8, -0.007930172, 0.026702108, 0.0022585324, 0.010008593, -0.021680027, -0.02156696, 0.111389145, 0.004639639, 0.03784025, 0.003962226, -0.0668973, -0.028295087, -0.04432231, 0.07120314, 0.018729135, -0.04907397, -0.103948705, -0.043614738, 0.010182222, 0.04179206, -0.013543455, -0.03385163, -0.025069695, -0.013597015, 0.0034274007, 0.033077475, -0.021843424, 0.021919321, 0.07144483, 0.020509098, 0.024436586, 0.035892475, -0.00094983797, -0.061337028, -0.085383, 0.007424564, -0.038788088, 0.07989341, -0.025575982, -0.060451094, 0.060581867, 0.082356565, -0.056705453, 0.0048461547, 0.04513215, 0.023778366, 0.043513518, 0.09104256, -0.05140235, -0.01123021, -0.06885336, 0.007250856, 0.072830714, -0.04336812, 0.025920171, -0.11409155, -0.009537421, 0.022203108, 0.026747186, 0.0037276533, 0.015937949, 0.0035980998, -0.020675266, 0.03354611,
+	}
+	sim := cosineSimilarity(res.Embeddings[0], expected)
+	if sim < 0.99 {
+		t.Fatalf("expected %v, got %v (similarity: %f)", expected[0:5], res.Embeddings[0][0:5], sim)
+	}
+
+	if res.PromptEvalCount != 6 {
+		t.Fatalf("expected 6 prompt tokens, got %d", res.PromptEvalCount)
+	}
+}
+
+func TestAllMiniLMBatchEmbed(t *testing.T) {
+	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
+	defer cancel()
+	client, _, cleanup := InitServerConnection(ctx, t)
+	defer cleanup()
+
+	req := api.EmbedRequest{
+		Model: "all-minilm",
+		Input: []string{"why is the sky blue?", "why is the grass green?"},
+	}
+
+	res, err := embedTestHelper(ctx, client, t, req)
+
+	if err != nil {
+		t.Fatalf("error: %v", err)
+	}
+
+	if len(res.Embeddings) != 2 {
+		t.Fatalf("expected 2 embeddings, got %d", len(res.Embeddings))
+	}
+
+	if len(res.Embeddings[0]) != 384 {
+		t.Fatalf("expected 384 floats, got %d", len(res.Embeddings[0]))
+	}
+
+	expected := [][]float32{
+		{
+			0.010071031, -0.0017594865, 0.050072223, 0.046929732, 0.05491682, 0.008599705, 0.105441436, -0.025878143, 0.1295813, 0.031952355, -0.04448072, -0.0089852745, -0.000509909, -0.06374169, -0.016089523, 0.04662509, -0.022060998, -0.15813895, -0.072848774, -0.061321855, -0.065877646, 0.054177605, -0.06213012, 0.038908366, -0.04580116, 0.05493584, -0.035267256, 0.012613296, 0.04251382, -0.007927403, -0.01902945, 0.060983833, 0.036926776, 0.013464811, -0.025808964, -0.043487485, 0.072623335, -0.04850803, 0.00428558, -0.02943825, -0.02913489, -0.03290691, -0.018345183, 0.0155583285, -0.011713048, 0.01530367, -0.009391865, 0.025963927, 0.09527476, -0.015497632, -0.024581224, 0.009084283, -0.07661165, 0.015987588, 0.049554788, 0.115980916, 0.0009802427, -0.02031978, 0.09233272, 0.00849488, -0.05705784, 0.068866335, -0.076607056, 0.06931919, 0.09223656, -0.055486195, -0.053620946, 0.008443246, -0.06315959, -0.066396914, -0.02516728, 0.018891005, 0.061389998, -0.028247874, 0.036244337, 0.0011042351, 0.06067215, -0.06755123, -0.008126048, -0.012737444, 0.030953258, -0.06380051, -0.07451028, 0.1191656, 0.012553826, 0.06532671, 0.014824665, 0.051425762, -0.08518537, 0.010257597, -0.0077732494, -0.035585348, -0.115389846, -0.03066639, -0.0832527, 0.013689985, 0.056588713, -0.040882625, 0.042672798, 0.022154681, 0.04685385, -0.05135596, 0.030175874, 0.007199854, -0.0041790465, -0.031146567, 0.07788334, 0.034205843, 0.06138031, 0.007510951, -0.036251485, -0.08457674, 0.021795211, -0.019397866, -0.03984967, 0.054795727, -0.033695232, 0.018102817, -0.10553994, -0.050397146, -0.011542906, 0.0378195, 0.022170838, 0.08049212, 0.007816837, -0.01683443, -0.059413332, -7.227309e-33, 0.13531439, -0.011213897, 0.0923026, 0.03597459, 0.039638437, -0.054985173, -0.03506899, -0.0037263383, -0.01955998, -0.034966808, -0.0057084337, -0.014629069, -0.024276787, -0.048383784, 0.04777095, -0.017076956, -0.06094759, 0.0059446157, -0.083057985, 0.084341705, -0.1046656, 0.041639294, -0.03668315, -0.008083383, -0.028216336, -0.04319357, 0.035999607, 0.07498755, 0.05645381, 0.011849057, 0.09846523, 0.10484252, -0.021864949, 0.045994766, -0.026346037, -0.05092382, -0.014708711, -0.0063834875, -0.085867085, 0.028602734, -0.0535738, 0.056528863, -0.059763853, 0.012410302, 0.06620772, -0.013472636, 0.038324803, -0.08890202, -0.05744544, 0.03199372, -0.034495477, 0.02363032, 0.014458106, -0.04159657, 0.06799366, 0.031207295, 0.069696635, -0.035037853, -0.0033100948, 0.0493309, -0.0133445235, -0.0034971808, 0.050776623, 0.078672916, 0.037620574, -0.011580864, 0.03812419, 0.04201406, -0.012800006, -0.07894726, 0.00902281, 0.013365969, 0.024159499, 0.009777319, -0.010906574, -0.08161233, 0.026987134, -0.0296618, -0.004335468, 0.013011258, -0.035210665, -0.019684888, 0.055351324, -0.06124218, -0.055006765, 0.012528419, -0.019175794, -0.012560324, -0.015807373, -0.06942039, -0.044893157, -0.048941795, 0.048249032, -0.10446324, -0.10786195, 3.58774e-33, -0.0004694524, -0.08636079, -0.087853074, 0.0071707284, -0.007416128, -0.01662082, 0.045272738, 0.06750471, -0.042886123, 0.08635933, 0.04555289, 0.06798365, 0.009930444, -0.003040414, 0.058509175, -0.035567205, 0.036180507, 0.06615616, -0.03779808, -0.062269486, -0.044531893, 0.07724946, 0.04343241, -0.021267718, -0.021633657, 0.06227748, -0.03914136, 0.028114952, -0.013057723, 0.051113747, -0.036822543, 0.054577183, -0.06644743, 0.022884717, 0.0048167957, 0.09043401, 0.0051002423, -0.083096094, -0.055096727, 0.07315016, -0.11049671, -0.020257315, 0.11254063, -0.053299136, -0.057593238, 
-0.023905706, 0.056623034, 0.12725255, 0.03595934, -0.043950673, 0.017003251, -0.024837377, 0.07269714, 0.043164223, 0.08047665, -0.019504813, -0.034397744, 0.096689135, 0.051885936, 0.010750518, 0.04023374, 0.0021946214, -0.0075854477, 0.0016714911, 0.014185944, 0.020396275, -0.023103109, 0.021491585, -0.009236667, -0.050526038, -0.016258504, -0.0899585, -0.0606858, 0.08100888, 0.0024563652, 0.041595213, 0.043729555, -0.025168482, -0.09529981, 0.088698424, -0.09840905, -0.0048626475, 0.03534257, 0.014159388, -0.06457741, -0.07597705, 0.012412196, -0.050220776, -0.055758025, -0.0569825, -0.018489538, -0.0021951278, -0.002204297, 0.03527849, -0.0547162, -1.430923e-8, -0.007930172, 0.026702108, 0.0022585324, 0.010008593, -0.021680027, -0.02156696, 0.111389145, 0.004639639, 0.03784025, 0.003962226, -0.0668973, -0.028295087, -0.04432231, 0.07120314, 0.018729135, -0.04907397, -0.103948705, -0.043614738, 0.010182222, 0.04179206, -0.013543455, -0.03385163, -0.025069695, -0.013597015, 0.0034274007, 0.033077475, -0.021843424, 0.021919321, 0.07144483, 0.020509098, 0.024436586, 0.035892475, -0.00094983797, -0.061337028, -0.085383, 0.007424564, -0.038788088, 0.07989341, -0.025575982, -0.060451094, 0.060581867, 0.082356565, -0.056705453, 0.0048461547, 0.04513215, 0.023778366, 0.043513518, 0.09104256, -0.05140235, -0.01123021, -0.06885336, 0.007250856, 0.072830714, -0.04336812, 0.025920171, -0.11409155, -0.009537421, 0.022203108, 0.026747186, 0.0037276533, 0.015937949, 0.0035980998, -0.020675266, 0.03354611,
+		},
+		{
+			-0.009802706, 0.060424678, 0.025257956, -0.0063643856, 0.07272723, 0.01719488, 0.090320334, -0.051705167, 0.099515095, 0.09072479, 0.007301506, -0.01968127, -0.075095184, -0.017409375, 0.019365614, 0.040805466, -0.011079843, -0.05856395, -0.12545314, -0.048980292, -0.044052314, 0.03115607, 0.037880868, -0.03187379, -0.0909825, 0.06357952, -0.076541565, 0.085011445, 0.03554875, -0.071272224, 0.021114277, 0.11005397, 0.03312636, -0.025947863, -0.061563145, -0.026466936, 0.02054478, -0.05426622, 0.056569945, 0.03292456, -0.09005933, -0.05698778, 0.026827272, 0.0751872, -0.07142025, -0.0043633, 0.054151993, 0.026441583, 0.078053534, -0.048995998, 0.056577347, -0.048973206, -0.07581186, 0.006902122, 0.0062451144, 0.037024222, 0.025028007, 0.021724675, 0.010117283, -0.040492155, -0.012010403, -0.03334674, -0.07570402, 0.071321115, -0.02062346, -0.0631419, -0.001237942, -0.055173304, 0.009124682, -0.08703634, 0.020684991, 0.05294139, -0.009563882, -0.052647192, -0.06467313, 0.041968923, 0.04473555, 0.03270584, -0.019611169, 0.00013324046, 0.038228948, 0.0509972, 0.0047100335, 0.05736671, 0.046469305, 0.04269017, -0.017305125, 0.011859765, -0.05701112, -0.03498464, -0.018940303, -0.0074608736, -0.07385685, 0.043892473, -0.09890047, 0.041379265, -0.024019944, -0.12034819, 0.0001821356, -0.0038607453, 0.056144036, -0.0005059898, 0.07110965, -0.03616245, -0.06406574, -0.009435536, -0.042290587, 0.07791005, -0.02365763, 0.007864432, -0.023739463, -0.018536761, -0.033538047, 0.0776669, -0.06058719, 0.05363198, 0.033863083, 0.012545284, -0.03260245, 0.029770961, -0.016934512, 0.028213669, -0.018053731, 0.06651968, -0.06952628, -0.017853932, -0.037421644, -6.839719e-33, -0.0055490523, -0.031681225, 0.04819487, -0.09944883, 0.09372583, -0.051811725, -0.037059266, -0.026262678, -0.037466466, -0.030253021, 0.0060922937, -0.09831781, -0.017570594, -0.07247917, 0.03856134, 0.00888377, -0.13072893, 0.02145255, -0.075681135, -0.010470858, -0.017236665, 0.058358245, 0.022016024, 0.0015762328, 0.009419801, -0.031423207, 0.08002972, 0.030580623, 0.05696977, -0.012164853, 0.11575935, 0.0040441174, 0.01759827, 0.043209996, 0.02948431, -0.0069428794, -0.025078153, -0.026160793, 0.013364178, 0.121543564, -0.004469769, -0.04534167, 0.043418996, -0.01768049, 0.062162045, -0.039375506, 0.017406953, 0.008458191, -0.02603069, 0.010130821, 0.023227274, 0.05305319, 0.06899141, 0.053088874, -0.0003113895, 0.009642751, 0.08884011, -0.030399954, -0.090916164, -0.051467095, -0.07382789, 0.08624027, 0.003223033, 0.010827092, -0.008318035, -0.011421701, -0.02900046, 0.06548931, 0.005405483, 0.068780296, 0.0428464, -0.01878741, -0.016996592, -0.036818627, -0.0062817424, -0.08700542, -0.008640271, -0.013171244, -0.004574588, 0.04233393, -0.03579696, 0.017357353, -0.087162524, -0.050884914, -0.14957926, -0.002008126, -0.02634847, 0.018098367, 0.02162604, -0.01503002, 0.0037868456, -0.015445877, -0.013303974, -0.09810386, -0.011673153, 2.8261164e-33, -0.022961555, 0.0090464745, -0.0057421196, 0.06604244, 0.042683356, -0.039691485, 0.027226122, 0.03183442, -0.028517157, 0.045575514, -0.055865873, 0.0924774, -0.046869125, 0.08027759, 0.118624836, 0.04889292, -0.06734586, 0.10688813, 0.009396721, -0.051344905, -0.067946814, 0.01592692, -0.010147019, 0.044173665, -0.030018767, 0.022772646, -0.031494025, -0.02233876, -0.0023573847, -0.010024354, 0.0032828946, -0.036839407, -0.11200184, 0.028629173, 0.030212566, 0.03185506, -0.01746865, -0.018295743, -0.036361173, 0.083925165, 0.007943152, -0.023664381, 0.15850149, 0.032088134, 
-0.070371404, -0.034124147, -0.015502377, 0.07960292, -0.06218589, 0.046537183, 0.04505064, 0.1043822, 0.029607052, 0.047920443, 0.09711685, -0.015767856, -0.064267434, 0.01960162, -0.093837254, -0.0028061024, 0.019721054, -0.027095793, -0.078636706, 0.0689579, 0.107794516, -0.033122607, -0.064406104, 0.016571952, 0.019280795, -0.023045482, -0.018821374, -0.018646069, -0.06431513, -0.03231013, -0.0027636476, 0.059007723, 0.059882853, -0.044795096, -0.06667144, 0.043793377, -0.019855661, -0.006715758, 0.04733659, -0.046866804, 0.03461545, -0.015199261, -0.039511763, 0.047361404, 0.052113988, 0.0008203065, 0.05290727, 0.02459614, -0.029357709, 0.034541644, 0.013009169, -1.36748e-8, -0.033930536, 0.007378359, -0.010701883, 0.04323486, 0.014735074, -0.04162692, 0.10553509, -0.012822099, -0.002357336, 0.040418625, -0.08136588, 0.033679843, -0.019665385, 0.077529214, 0.060347307, -0.016181026, -0.11332622, -0.04306442, 0.023209568, 0.07448782, -0.06055759, -0.045812756, -0.087526724, 0.0534105, -0.044014834, 0.029827949, 0.038628686, 0.016933717, 0.027725562, 0.078133695, 0.055581007, 0.05306717, -0.010792625, -0.029803185, -0.08492531, -0.016416015, 0.030501937, 0.06944753, -0.061944496, -0.122021444, 0.011901371, 0.07258673, -0.017778289, 0.0030972173, 0.014411535, -0.03802866, -0.052976213, 0.060414705, -0.053164586, 0.01794129, -0.104411006, 0.010633235, 0.042881854, 0.042603284, -0.003009017, -0.08530093, -0.039561126, -0.004481811, 0.013104284, -0.008498699, -0.028943708, -0.03587923, 0.05940551, -0.000055299755,
+		},
+	}
+
+	sim := cosineSimilarity(res.Embeddings[0], expected[0])
+	if sim < 0.99 {
+		t.Fatalf("expected %v, got %v (similarity: %f)", expected[0][0:5], res.Embeddings[0][0:5], sim)
+	}
+	sim = cosineSimilarity(res.Embeddings[1], expected[1])
+	if sim < 0.99 {
+		t.Fatalf("expected %v, got %v (similarity: %f)", expected[1][0:5], res.Embeddings[1][0:5], sim)
+	}
+
+	if res.PromptEvalCount != 12 {
+		t.Fatalf("expected 12 prompt tokens, got %d", res.PromptEvalCount)
+	}
+}
+
+func TestAllMiniLMEmbedTruncate(t *testing.T) {
+	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
+	defer cancel()
+	client, _, cleanup := InitServerConnection(ctx, t)
+	defer cleanup()
+
+	truncTrue, truncFalse := true, false
+
+	type testReq struct {
+		Name    string
+		Request api.EmbedRequest
+	}
+
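+	// All three requests below should yield the same embedding: the first embeds
+	// only the word "why", while the other two send a longer prompt with num_ctx
+	// set to 1 so the input is truncated down to that same leading token (by
+	// default and with Truncate explicitly set to true, respectively).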
+	reqs := []testReq{
+		{
+			Name: "Target Truncation",
+			Request: api.EmbedRequest{
+				Model: "all-minilm",
+				Input: "why",
+			},
+		},
+		{
+			Name: "Default Truncate",
+			Request: api.EmbedRequest{
+				Model:   "all-minilm",
+				Input:   "why is the sky blue?",
+				Options: map[string]any{"num_ctx": 1},
+			},
+		},
+		{
+			Name: "Explicit Truncate",
+			Request: api.EmbedRequest{
+				Model:    "all-minilm",
+				Input:    "why is the sky blue?",
+				Truncate: &truncTrue,
+				Options:  map[string]any{"num_ctx": 1},
+			},
+		},
+	}
+
+	res := make(map[string]*api.EmbedResponse)
+
+	for _, req := range reqs {
+		response, err := embedTestHelper(ctx, client, t, req.Request)
+		if err != nil {
+			t.Fatalf("error: %v", err)
+		}
+		res[req.Name] = response
+	}
+
+	if res["Target Truncation"].Embeddings[0][0] != res["Default Truncate"].Embeddings[0][0] {
+		t.Fatal("expected default request to truncate correctly")
+	}
+
+	if res["Default Truncate"].Embeddings[0][0] != res["Explicit Truncate"].Embeddings[0][0] {
+		t.Fatal("expected default request and truncate true request to be the same")
+	}
+
+	// check that truncate set to false returns an error if context length is exceeded
+	_, err := embedTestHelper(ctx, client, t, api.EmbedRequest{
+		Model:    "all-minilm",
+		Input:    "why is the sky blue?",
+		Truncate: &truncFalse,
+		Options:  map[string]any{"num_ctx": 1},
+	})
+
+	if err == nil {
+		t.Fatal("expected error, got nil")
+	}
+}
+
+func embeddingTestHelper(ctx context.Context, client *api.Client, t *testing.T, req api.EmbeddingRequest) (*api.EmbeddingResponse, error) {
+	if err := PullIfMissing(ctx, client, req.Model); err != nil {
+		t.Fatalf("failed to pull model %s: %v", req.Model, err)
+	}
+
+	response, err := client.Embeddings(ctx, &req)
+
+	if err != nil {
+		return nil, err
+	}
+
+	return response, nil
+}
+
+func embedTestHelper(ctx context.Context, client *api.Client, t *testing.T, req api.EmbedRequest) (*api.EmbedResponse, error) {
+	if err := PullIfMissing(ctx, client, req.Model); err != nil {
+		t.Fatalf("failed to pull model %s: %v", req.Model, err)
+	}
+
+	response, err := client.Embed(ctx, &req)
+
+	if err != nil {
+		return nil, err
+	}
+
+	return response, nil
+}
diff --git a/integration/llm_image_test.go b/integration/llm_image_test.go
new file mode 100644
index 0000000..b9726c8
--- /dev/null
+++ b/integration/llm_image_test.go
@@ -0,0 +1,586 @@
+//go:build integration
+
+package integration
+
+import (
+	"context"
+	"encoding/base64"
+	"testing"
+	"time"
+
+	"github.com/ollama/ollama/api"
+	"github.com/stretchr/testify/require"
+)
+
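+// TestVisionModels runs the same image prompt against several multimodal models
+// and checks that each response mentions the text shown in the test image.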
+func TestVisionModels(t *testing.T) {
+	skipUnderMinVRAM(t, 6)
+	type testCase struct {
+		model string
+	}
+	testCases := []testCase{
+		{
+			model: "llava:7b",
+		},
+		{
+			model: "llama3.2-vision",
+		},
+		{
+			model: "gemma3",
+		},
+	}
+
+	for _, v := range testCases {
+		t.Run(v.model, func(t *testing.T) {
+			image, err := base64.StdEncoding.DecodeString(imageEncoding)
+			require.NoError(t, err)
+			req := api.GenerateRequest{
+				Model:  v.model,
+				Prompt: "what does the text in this image say?",
+				Stream: &stream,
+				Options: map[string]any{
+					"seed":        42,
+					"temperature": 0.0,
+				},
+				Images: []api.ImageData{
+					image,
+				},
+			}
+			ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)
+			defer cancel()
+			client, _, cleanup := InitServerConnection(ctx, t)
+
+			// Note: sometimes it returns "the ollamas", sometimes "the ollams"
+			resp := "the ollam"
+			defer cleanup()
+			require.NoError(t, PullIfMissing(ctx, client, req.Model))
+			// llava models on CPU can be quite slow to start
+			DoGenerate(ctx, t, client, req, []string{resp}, 240*time.Second, 30*time.Second)
+		})
+	}
+}
+
+func TestIntegrationSplitBatch(t *testing.T) {
+	image, err := base64.StdEncoding.DecodeString(imageEncoding)
+	require.NoError(t, err)
+	req := api.GenerateRequest{
+		Model: "gemma3:4b",
+		// Fill up a chunk of the batch so the image will partially spill over into the next one
+		System: "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed aliquet, justo in malesuada lobortis, odio ligula volutpat quam, quis faucibus ipsum magna quis sapien. Aliquam in venenatis diam, eu viverra magna. Phasellus imperdiet hendrerit volutpat. Vivamus sem ex, facilisis placerat felis non, dictum elementum est. Phasellus aliquam imperdiet lacus, eget placerat ligula sodales vel. Pellentesque nec auctor mi. Curabitur arcu nisi, faucibus eget nunc id, viverra interdum mi. Curabitur ornare ipsum ex, ac euismod ex aliquam in. Vestibulum id magna at purus accumsan fermentum. Proin scelerisque posuere nunc quis interdum. Maecenas sed mollis nisl. Etiam vitae ipsum interdum, placerat est quis, tincidunt velit. Nullam tempor nibh non lorem volutpat efficitur. Cras laoreet diam imperdiet ipsum auctor bibendum. Suspendisse ultrices urna sed metus sagittis suscipit. Quisque ullamcorper aliquam nibh ut mollis. Aenean dapibus mauris pharetra, venenatis elit ac, hendrerit odio. Cras vestibulum erat tempor, lobortis justo eu, lobortis ipsum. Nam laoreet dapibus sem. Proin vel diam ultrices, elementum ante et, ornare lectus. Proin eu accumsan nisl. Praesent ac ex vitae ipsum vulputate tristique facilisis sit amet lacus. Nullam faucibus magna a pellentesque pretium. Nunc lacinia ullamcorper sollicitudin. Donec vitae accumsan turpis, sed porttitor est. Donec porttitor mi vitae augue faucibus, vel mollis diam tincidunt.",
+		Prompt: "what does the text in this image say?",
+		Stream: &stream,
+		Options: map[string]any{
+			"seed":        42,
+			"temperature": 0.0,
+		},
+		Images: []api.ImageData{
+			image,
+		},
+	}
+
+	// Note: sometimes it returns "the ollamas", sometimes "the ollams"
+	resp := "the ollam"
+	ctx, cancel := context.WithTimeout(context.Background(), 3*time.Minute)
+	defer cancel()
+	client, _, cleanup := InitServerConnection(ctx, t)
+	defer cleanup()
+	require.NoError(t, PullIfMissing(ctx, client, req.Model))
+	// llava models on CPU can be quite slow to start
+	DoGenerate(ctx, t, client, req, []string{resp}, 120*time.Second, 30*time.Second)
+}
+
+const imageEncoding = `iVBORw0KGgoAAAANSUhEUgAAANIAAAB4CAYAAACHHqzKAAAAAXNSR0IArs4c6QAAAIRlWElmTU0AKgAAAAgABQESAAMAAAABAAEAAAEaAAUAAAABAAAASgEb
+AAUAAAABAAAAUgEoAAMAAAABAAIAAIdpAAQAAAABAAAAWgAAAAAAAABIAAAAAQAAAEgAAAABAAOgAQADAAAAAQABAACgAgAEAAAAAQAAANKgAwAEAAAAAQAA
+AHgAAAAAXdsepgAAAAlwSFlzAAALEwAACxMBAJqcGAAAAVlpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6
+bWV0YS8iIHg6eG1wdGs9IlhNUCBDb3JlIDYuMC4wIj4KICAgPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1z
+eW50YXgtbnMjIj4KICAgICAgPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIKICAgICAgICAgICAgeG1sbnM6dGlmZj0iaHR0cDovL25zLmFkb2JlLmNv
+bS90aWZmLzEuMC8iPgogICAgICAgICA8dGlmZjpPcmllbnRhdGlvbj4xPC90aWZmOk9yaWVudGF0aW9uPgogICAgICA8L3JkZjpEZXNjcmlwdGlvbj4KICAg
+PC9yZGY6UkRGPgo8L3g6eG1wbWV0YT4KGV7hBwAAQABJREFUeAGE3QfgX9P5OP6TIRKRncgmS6aR2DNCKEKLqqpRW9FWq0q1dEQparZKF7VK7aq99yZGSCRB
+BhErk0Qmyf95nZOTfOqrv/9J7ud977nnPPt5zrz3Ntp0s61XrLnmmql58+Zp6dKlqUWLFmnZsmXp888/Tx07dkwLFy5MX3zxRT4aNWqUmjVrlho3bpzatGmT
+Pvnkk5y/YsWKXHfttdfOv/VauSZNmuRj0aJFSX15cIAPruS3adOmafny5Uld5dDkXP05c+akTp06pTXWWCN99tlnacmSJQGnUVp77VbpvffeS126dM4wli4t
+dK8RsJoHDvUXL16cy7du3TrjXrBgQS675prNUsu1WgV/AW/ZktSxQ4dMC37BXbDgs7Q4aG7cpHFq2bJlpo984EY/3vELB94k+eqjU36V1fz580OmSyO/WZZt
+8+Zr5jKu8YZv8pTgkCoMcnCgm17atm2bz+Gv8NWnvxUrlgd9S3P+4sWLQnZNc91PP/0ktWrVOst19uzZwc9akd98lczxN3fu3FwPLudrtwrelqcsM7LG95rN
+Qv4LF2U6XLvfvMWaq2gi90ahX2mttdbK5ej2o48+ymXokv7Ri/ZPP/00LQ16O3bqmOuwCbiaNSv8Ngs5fhFl2QPe1fXLBtgLutHrVyJnciffZWELS0KWytEL
+Odd66oDjHrnjpdoiGTbyL3DRAX3AT77xEzAW5nrwuY9m/DTp3bvf6Hbt2oWgW2WC3ARYZQdA8+bNW2UYiILU4T6FIsw1w0NAYaZ5RoT4KgRIwa8GgBgEEjC4
+DFJdB9jynTNYDqF+pQdDyqw23ma5nGv1MIcnuAgMHPfQWuholtKKlNaEP2heujQMYyVuTrT8i+VpUeCsNFIEueAFDWBSXD1nOO7PmjU7nK9J+uLzkE/AnRnX
+yi5atDgbcMsoN3/+Z2nK1PfS2i1bxL0mmQ+OXmlEO4fEX4eOHTJORiefPNdYoxiR8nTHwCR8f/EFY8T/iqyThjyjkdHBRdbkIMGFdrLiqIx5/vwFaY2ma+R7
+1UA5M0OjM7Dw59x9sPANDn47dGgfZVOmPSOJP2RF/+5LfjmsX/ckcqp0gkfv+GQDZF9tjyyc+yUbNLjmGHPmzE0LQk6u8Yov5zUYu0YvPGRGFpmfkDd+QvAZ
+F9jwg7F8+RfB29KcX+WMbvxKTfoPGDQ6HC2nShjBKuwXg126dMkKwBAiOA/CCRYBkAHaKhBSvnodIsKrywDBpVCplnWubFWSX+UZP1jKFYK/yPgqXLDQQyFw
+Y1Id5THVPBxl5qxZWfBgEgZ6CLdJtC5oBrd5i+ZRNoQWPM1fMD8bIyNcGBEXn40bRUQKXhktOASMdzRSgoNTukbbhx/OjOtmqVevnql9GHe3bl1DZi2Cjpap
+e/duaZ11OoXzvJsWhzI6d+6Yhg/fOk17590MFz7w8A0Pep2DvzgMC72Zt7in3DrrrBM8r53pgrsamJZEvWoUZAU2OLWMewyPQ+KHE+LBr7qff74sG7M6Ak1U
+z62yenBXfJ9FsGkaLR5HoAt6qLjAw0MNouo64ENTTZwWTDaCR85SaCgtkxYV33SmnFTpJidlHXQPPidaFHjR4T6a3NNCCSBgKM9e8Fdhocu5+5wK7ehUFr8f
+f/xxBL3S25LvkO+Qcrldd/v6imIcy+JG41WMtm/fPjMHISF/8P77YXALMnEAIFbkEvkqUADlI0pSFyMEDXltip0zTvkExckWMNaVzgaeesoQLmPW3arOUxlm
+OIRVIzI+aotBMeoTrnx4wMQXfGhv0rhprvtFRBtOMC/gaYWaN2+R+dK1+DycS3k0zZz5cZQvRt0BnFAeJc+aPTftsvMO6eennJwVWmRTWgmGKJqhffr099LR
+3/t+uvKKv6W+ffumu++5N+2z37Fpj123TLNmzkyd1umcHR9f8FG4rqdgwHnwQNG1C4vH6mRVT4xCGfjcw7trMip8N849DDDJrtZniM7xQz8McUG0SuS+NLq+
+5Coo0Lcya0b3q0uXrmFEjdMnK1tLAbYaL9lrAeCuhkf2nBgs5dgJWeFVYh/oZch4rc7iGr01YMqvOleX3XFK+iU79kEOeFLPffck53A40AFmlQ/+lXeNVvfR
+Cwd86tb6aNA6fx49D3LNbawKGMcI711rrZYZGCYh5JGQUI6EQIDdg7h6dEOi5akPsaQ8BolMs+saXr9gtwyHIVhEKYdQTGICHMpQlkDeD6emCHQU41oYDtM2
+160wlCcMNOJLFwhNaJTAnzN7Tnacxk0apQ8+CIFFfoeOneKvrkTrTN/cuXMyfjQZ04DHOVvHQcFahsefHp+O+V7vaGk6A/0/U+9evdK222wVrVW3XGZA//VT
+9y5tomWakV59+ZnUfO0eaY/dts+8MUo8zA4nHfvqi9Eh7x79pPfSVlvvkLp27Rz5c7KclCM/vEnkRYbyyBe/8hg/OZAhuc6KVptcyQ9PeHEfTvkSmS0LvgUz
+9+NGLqMcvLPn6LYW54M/yyX0AoZruoIPbnYwM4KFfE5vuCDRAxrkf77SDhly5YHNKYMH+pTQxyblK8d58PTZZ9EdjfLKgk8GyqAHTOd+yQU+/KFNK5wDRshB
+HQHAWJJ9tY8u6lotip2xAXXBwYNrrSacTQm6fft2uZIbCONUkGNeswspJhDIUAkVEgw5KAIw5xA5RyRBggGmOqIruBwVnEqMFkekd28ZZqKOuu6DRdBoqwZB
+mNVp4Q7zyTQTJhjKoo/Q5FV60MYJCYLQFy1cnAezTVY0zhG2jkeaNFkjfRKKUL9ROJl6eKs8wl0VCd+2W/ZP199wSx5Xde68TuZ39913y3Jj8HfffXemY8xL
+L6d33p2+ypnRPueTxenHxx8VrdkJacqUqenKq65PHdq3ztH//odfSDuP2DRdfPGf8phDj+C5515Izzz3Sho8sE+aMeP9rBfKZ7DgodU5eaOf/J37JdOqC2Xc
+x0s98AhWNXaBY01jreVF9sZEJjEWL14SjhRjthhHduzUYZUDkgVc4Ah04DvneA734FcOrRy04qTTpStth5wrP3TuUKfaolYCjeq7x07c0+XnANVuODY7U7d/
+//5RZvZK+2yWJ0DkC5r40c0nB3Q50EVmi6Krr4vLJ9hVjx49Mgw0uCZv+Brt8839c9eOsarsJgG46Rpws3cIQjxlOK9NX0NGCUOSRxgSj2e46kJeiC9llEOs
+svKrUNFAobWsusqgi4O4B9aSJYuzMEUFjFa60WywbHaKQ+uOEOr8+TFLFJMKZoWUb8J5o2yZ4SoGBHaTiLJpRaPc314UhiOBAzchi3auK83odr502fL0wnOP
+pf2+fWC65por8njt3XCc9dZbN3XtPjB9MGNKOurow9Mf/3BhhvX66+NiZmlJ2mzTTTMOfx599LH03UOOC8dpm/b/9l7puOOOybhqAfhv+8/t6fCjT047bjc0
+ZtEEqIURzUv/f3l0N4xPi9HqfpQILmqThyCGVrJirGTRIsaL9MDQ/CpDBytCbmYttcqSmT7BsM4GNo3JCF1kxkTHuqfkSTYcRyKrqj92U4JYCaLkpuyCGKN+
++un8fF51TIdsEN3orLYCpm4cmLNnzwrcZbxKN2wEPvTArw6cyreLY8rUqbm1gZfjVRzV/ti2AMAG2K18ZeUL9mTJWefNm5umTXsn+4BGSCBv0q/fgNGEvmYQ
+9nkIGIGYAQzTiKnRQblqyBDJZ6AShBAjrrYgZvGygYXy1VOe4MB1TlDV+8EDSz44tVvmPlrANIXMQQgLvqKg0q81roGLcpct/SK1DVjRXoZBLItAEN21EIKx
+SnXmFs2j/7xC/zYmHYIegs+RJcaJxkaMjlHBj3a4yAKdzhkrXuGkODR2aN82JlzapoED1k+7fm2XXF/5F154LQ0Z3C+1DmV2jan6UaN2z/cooVvXrlneYEq9
+e/eKaPl+8Ls0XXDB77Niyf2ll14K2TTJRrHBkCEpZp3T3fc9HBMbrbKC0fDZgtJ9IadyLItfA/fSvwe/ZQyaa9fOAJrDcIZPPpmX+cGHvLlz52V+Ca7qiuzw
+TS7krx4jIxeHGVCtBHmSjXK1LJ3Kd78Etfmruk/oAkdZuMkUHjDlfxF5einqu4dhY1nd02qH9PRZyJoeq/3Jq/b0/gcfZD1VfcFJZuQOJ3rhq/erbvkCvsEB
+b/r06VG+TJigV7lP5n2SGkOqn4tQwnGt+eXFy8IIeTRiJcAoAUMEXg0cMkAJqEYAMIx7uoahmMVCbG3uFy2K/nYkeZVRsCRlGLQmWpJPmHDoWoBLGcpwjI8+
+mpnvK2sw3DrGLB07ts+O0CzWPXRPPo3+fBZ08AKe+nhep9M6Ofo2DgESCD7jNNOs5ZKnbBWuuvhfK2jQunFowkcTmVDu4sUxuI/fhmnhwiURyRdlWrUYWkjp
+i+ganXHGWWmXXfZKb7/99qoq667bM+277zeyA8u8/vob09Zbb51+ceovV8ll1113SdMmvxN4W+RybVq3CZ21Cf60MsYrbbOMBC50043Wh34YBjrmBv0mFIx3
+QvVZH/ihE7Dw7aAn+WDRBXj0LcDg28Fu/AqA5KGco8qQ3MAgszJWKt1/QYLc6VMib06kxVCfY5jUAb/aoVlZa1NsxX1OiiaOXINsDW5owUPRXZkVxB9aqk2Y
+6ZOnDhx4c0gtAqZxMDs2BjZ+AqvaLR3SZlMZmNBciYIMliIgInSVEMLJKAPjFIFASCuzBFaZAINAwHFUBzWuUB9RYCqHeAqoc/yUprw858rVFkpdNHEQXQGt
+RvtoBfDw5ptvp6nT56Z2rddOc2YtjO5U+9R/wHphEK1j0W9ZsFq6m1qoYC1wl1m8tQJGs+DfDMyKFWumiZMmp5dfnRRO1jr16NYl06sV1D1jDOPfeCONe218
+GrbpJhEgtKAMrwQBvHaKxUXOiwfJDyMAv8xwWmcrEx4zZryXrrvuP+FEL6exY19P/fr1y3XQ16vXevmcXA866ID03e8elGVB7hJ5RRubloSxrR2LrYsbi+gW
+CGOdK1okk0Z0R+aMgp7o1DoNZzMm0FWzcLl2q9LdW7rU5EBpkeCNdibLnnx1f8kQDPxUmGyHDuXrLtORBK+ZRvTW8YV6nJY+S8Ashk/XDjCUn/7uu3mSg6Oy
+I/iVh6caOX7A40jyXYMBNtrpynKGQysMtrLsBw3KrHZCOomJpnBgSZliD9HafFp6SvLAltDEFyrPaG7KKx26AISCeATKQ0x1JERWJ6IkTiAPMcozIr+QMX7n
+fhGgm0FpEkEQrHsIAQMhDtcEUnHDBy6m9ZUJQDkK7dmzR5o8eWoaP+7ltOHGm6cRI7ZLh0Ykx2AR7JIY2L+bXhzzahr7ynNp8ODNUt9+6wbesvsBHC0j/Mp/
+GgJ74vGx6YfHH5jWW3fdcJgJ6aorb0t77Dk8RyKCffTxZ9NmwwanQ797YJ55/Nf1t0YLqEtYAoaIXVrIsosjMxzxau7c+alXr245AOCxJkbbrt3acVnWxGo+
+pTIeCe8ffvhh/JoIaFxakzh/4YUXU5uI/vRD1mRjXOcaDtcmBJyDoYdBvyZD6GzKlCnZmTikWUs4tNKClPILY8HbTJZAoUVFEz7hokPw4BBMGTkHhs89MrV2
+VoMXOdM3e1JfkGEnaEOva7Bck3ObgE0/bEEwdbADdgGf8nhRto6hXCsPHv4ki/bsAU26rmy24mTnxQbKfIAewxwzdVG30FS6w/yCDaMbz/jgSGChh87ByY6E
+KYUQ7KaCEGIOQsS7lgijElwiVYl0kClXBaSM+5QCudaOA8lz3WZlF87qtTJaOQLSpDNszX+NGNUQCMU5g7rj9mfSYYfvkc79/Zlpww03yBGaAhsmszEGpK+P
+G5/+c9sd6W9/uzTt881vZzrnxAAaLzNmzFjZ0i5JDz10Qxq50070n2Wx225fS78947w0ZFC/9MRTY9KJP/5e+v73j4t6jD+lb++3b/rBD08IesvYEg9zYmzR
+Irpbq1MEizXLDoGWa7WI3QKly+A+Q6C0xo17hHxX16B4rbtkQuSyy65IZ511Rr7+xS9OS9/61jfT25Mnh6xjRi4rNGbqYmxXDDQWciPQ6faC0yKmbhk62hwG
+y7qtdYZLK9Z0jTJV3ry58ULp/zcLp6GvttHq0gPZ0jGj0X2Diy7pSjl8WFvT/WZDtWXjoGyHPay1Vo8sc3aiDON0D4w8vgm7Y/xwyBNIlYHfNZ7YDpvUerDP
+du3a5zzyEuDVz3Jb6VCl3vIsB7jAZEN4QTP4aHPPssziuMafa/6AFrzCyXlMvKknHy3KuN+0eLaoWebiOZFmnEBVwHStBHjDFgcRjBFAZY1RdGsQhlhJeUpw
+1HP1ssOF0DlOxSNPPTQxAgnjEsGbGFi0aFma+s6M9O9//zntFlPM+rANE6YktBJs9+7d8zFypx3TPvt8I536y9NTq6BzrYA1PwyrWRj5gw88l84886Q0cmQ4
+UST8wP/NffZO9933QJow4c3Us0fndNDBB2Yncp8RDR48OB1//PfTkd/7WfrayC2CtsUxydE68wq/JKK3a98mR7rPYrq9UJdvxR9dLVPQZdW+5goYN998W/r6
+1/fMeE466cS0Taw/tQ7YW225ZZadRdBzf3922njDARGtSzeubZt20RX5JH0a4zfbe6o8yZjhiOrkWQykDPzJ2oIr3ZmN03rQIUdYKxxfeVEXz8rSN13oujHw
+teNgfORhskonl2Mpo2xprcpY2EBdQhca1KEvcMkL37pinMF9ToDmsj6k1V8z4JWxW7VX8MCBR1l2qx6YbBnf7rM/tuy63hOIBJ08Oxll8INuvKJLWXToorJL
+dg0vWsGQlAG3KaYRgGnIGLnCBqsAyqvNF68HkHEAZkqREAGyh8zOBEyAVR2pwlQmYnCup65rDmqMkreaBNPqYsCBDjDkgSEaGat89NGsdP21l6ehQzfOjKAR
+PId6NdVz+eBQ3q67fi0ZyB9w0OF5wgDudu1iKjVw9+vXN1dVlmOrr86QwYPSVdfenw7af6fciipUDQDs9QJeixamYmOPXRifCD79vRmZbnLlXNOmfhzdyvXC
+OdcIA4wp4qBZophlMWUeYSLv0cuZ8Uekve66q9JOOw1PBx98UKZxjz1G1dsxppqc/nzp39LQYVtmh9faM76msf4FJifS1VqwQCtQornAoKvml/Lfi/FZm1Ym
+J5pmWkXc6mTGFC1bakVjEimMjTGxAXzTB+eXqozlg8sIya4amfuMk42BQV5+ydhvNXD0wA82GrRO8LIPZeALVWS4yrIH9euEFD3BoxzcYKlLFsoJJGwSjVri
+alf2VFb7oSv0g48OTs0R4cKHxkV9B9tUDhz1+UMO+5orGbWw8QxCAFIYQSIFQh2aVdEBQkwoR4BgEBqB1Xx1IRUxCEn3ojKKeIQp656y8givGrKyYIP50IN3
+xoLkc9mJqgCq0bvv+HICRxl8OB80aFC65qrL0l77HJw22rB/jnC9e9p9vLolU67Cyr/LpmcF1Tz3azJ2+WD6+LTxkN6xhUrXp3lMWLyd/vKXv6Utt9wit2i9
++nTM24+sybz99ox09dX/TBtvvFG66aZbQlHNwmGGpWuvuyFosR1nWbrkkr+H0++ZTj3t7DRmzMsxqzcyxg1t8lrRxEmTot5tQU+z2CHROesDb02DRw5ovOPa
+NiLBUKvCyMjXWFV0V66l9aQoZzeBpHx1BK3SsmXR5QuYHcI2rNeRoW1cur261mRQ5UC/dOZgN+TENhgclTQPWuNPtiEBl4x0AU0YsSfGTp/qwtM07IFNsA3B
+29JM7daBif6Kx84D25U+iOlt8kMgG1QOzxyITYHPKdCmvsaCbbFL58qWGcfSc0ITpwQLv1pp8gEXjWCBbfYw0yoDEkbMCwHgKBAi1lw9obgGQB6BAapp1Epw
+JMoSIRAmAQ4uYajrQJh6jbRoMTCmoOVflIVaAmXMy5aVRzWMQ0TZ4gDL0yMPv5j+9vfLwji3zApEA5juO/f7xhsT8jy/fJFngw2GZPy1DMU732ijDdMfL/pd
++u2Z54WQ10iTJryUZ7oy4V/6Q6kpdcxw6i3wqxE5l2zYDcayUrp1XSedfPJPa/H/83v88S/nvH79hqY+fbpnFzYm3Guvb+T8HUbskneHR0OT/nLlneGUl6yC
+0bZD79Sze8f0+muxbahB2mCDLVLnLmUvGx3SyaSJ74aBTVxZyjrV0jRw8LA0aOD6eVdEcBFT9aV1oRvGQm4c46VX3kgz3n0jdV93SN5ou07HNmnC+EkBY35a
+f+AmqX+0sMZ4JhgkemYfDM+5NbKPP56VHnv69TS4/7pRYkV6d/qHYR9rpSlvjc11OnXpG3B65qBA14yULhkoudaWgR1J1TZ1K9HLds06OtiblpnDgWFJgwwk
+sPAEnpYaLvrjPPTPpt1Du1/1wGf7tZHRg6o8wosWh/JsqqnoYDoaIsqEFFEQTY4BLa/lWPJU4M2coRhnWZMBWFK2JkTUaIDQ99//IE8hEzanUV+yaKpcZj4E
+IAKpq1+KIUJD09Bh/dPeKw0NbdV5/Kpzzjnnpt/HmCGlWC9Zu11aGq3DKT//XvrpiT/OExGF3jITBu+IEcPTn6PVgEsyWP+qZMtSSrNW0ftVZeSRH8W0iXHM
+A/c/lTbbfNt0wHf2TUNi8ZRxahl0/QhewHnzzTfTLbfcnu655z9p1932zDTusedeWe6vjp2UDjn4W2m/mMwwdpk6dVq6NLpyAtBLL4+P8dLm6bxzz8w7zPH1
+0Ucfp3/968Zo6f4R48Y9s0E9+khMjpx4ZLRsF2dZhghz9LzvvgfSRRddkLbbbqcsQy0M+fbs2TPjfuyJ59O2W2+Sfn3aT3LrbT0J7crQ28yZs9Jrr72WHnzo
+8fTiS+PTTiO2yjJkF1XfAuyDj76Yvr779unIIw9JA/r3j8ks61hlWUBZOwOee+75dN55v0/rD9g49e2zbgTzsimXExj4M9xqF87h5wjsCQ52SOZ01zLkxBGq
+ntkkx9StY5d1AkH56mD0pqdlLU4DAB4erBMasqjHifWiJDJwuJbvXBk4I+A2y1EUQkoGCHAFDdQV4o1meiw+IRAxEHMuwlFPl8F99eRhDOOQYZwgOUD1eoTJ
+V0ZrqJ/5+edlAyziXINlXeb+++7KXaU66CPc6kxwn3HG79KFF54fU9V7Rb1irMpc/c9b0+x4Hujiiy/MuNRBD57Qf9CB+6efnnJGTJ9vmfPR9OWkrLRgQXRr
+ViZwakKjhGbdpclT3o2u2Vkxs7bv/3MT67bbbpO++c1v5n14Z//+j6ldGwuPAs5Hadddtk+//vVpmWaw+4ch0os6V199TTrwwAOykblX0447jojWerM8qzhs
+k63Sww/fEl3GHevtVb+77LJLsg/wlJ+Pjoma6JZHy89ILTC/9vqkdM7vTk3GY3on/yuNGLFD0HBgdDFvTr86/YK07VYbZZmGaLMu581blq676o8JTXoqX5U2
+2WSTmPzZOx1++KHpoj/8Kf3njgfToGi5LGbTnYDLvhgtWuiBvdA3mRtvLlpUNloXfazI9lqm8cu6mTqm9+lcAo/9sk+tCccJ98g26b7yDk4ER7UV5dm8Vqra
+EPrITTn3m0Jcu3I8WFPHudhKbc4A5ySSMtVTEQMQ4PI0rZAr07J57DSOSGG7zfIoo6yoLVVBFQcqA1iGoq9K8GAhFF70pDV6ps022zTXrX8qQ08//Uw40QUx
+rb1ftJbl0Qx4ML39NsPS5Zf/NX3jG3vGDNgeq4RQYWy++WYxy8bhS5ei5jf85dBSXQdzjseivDJekOd6bBjiWWeemp2o5i2P/BXBR4FT6lXFar04RY8ePdIO
+O+yQ9t5n3zTu9TFp9G9+kY2GzCQK699//XTxny5Jhxzy3ZznHjlK5MRIjj32mHT/Aw/FTvQjsxPJd9RyaCTXXXbZOeuNQ339G9/MRjLmlQnpxuv+ljiJpKy6
+fhvWr7R37NghnPbYvDv66ON+nvbda2RE/7nppVcnpvvvvjH0tVmGU2GoBw541UjRYsz6h4vOj8B2errkL9eGU24Y9YrMazm9EmW1JH7xzvENKwRm+eBb8xEg
+2ZVxjW4nubAFjqDnInBXOPI4JpgcVjeOczlng+6h1wFOpQcfxpt4U4a9N4a4RKT5ubLBsxuI8atyNW6eV8tXoSBCs2naUXkM6S5qvh0ijGZaUhaBZoCUQ4hf
+EQexGEYYZt2zcwGMoRt0D2OLxwgiKS9V5T4Smzx7rDsgO4N7hIxO50ui7zxqj31ifejhLKBKc4WhhevXu0d6+81XM74M+Et/tHASumpa3R7pmsZGxu6DQvjz
+0/DtN89T1sqRm6SbQr7gwE8Gfh3VyIYP3z7WuP4Wi6TvpnW6rp/loK6yDgmMb+/3rXxer/EBjntVyWeecXo8NDg8l6v3ajmw4JRM9R951DFpXhjb9Pc+TMcf
+d0iqTlTLKF9oL/S6rrRX/vb/9n7pR8cdHN3GmdHVG5cu+P2vsxOBIYgoX+uxKXoGs9JCX2zi2GOOjh3tvWPM2DfrTlm6B4d9sQ2tCVtUl9M45xTKyJOM2+St
+HYHZw5V1fRQdJmjA0bJxGLSwRV3XPn1653tsmNOAiUfX1R7lo9ehOygfLOWa6tIpgBCzH/bXAYJIQER3h/squSfCfhqRQB6jnfHee2UNIaIAQqtDIZTXE1Ql
+3nVD4SIKIeASHmFhTl35unsDBvTLXZssqfijLBgijMcK1u3ZJUemWh/tyjAgfeoxL72a+8rGKuBWR1Ju3XV7ZLBVERVH/dXNzSnqfVUiCzDfmDA5Jgv2yPwL
+KoF6FZ6xY19LTz/9dHbGbbbZJmkJJTzgEe6dohv2u7MvSB9/8NYqNOAqIwk2hZ8yneuarCs/YDg3wSLh31gMj+Rfy4HnHp3vHM708CNPp2lTxsUs4hm5noCh
+TMX71FNPxS6KMVkvI0fuGLoYkGHBV2nf8+uj0lXRjV4jHvLbbrvtMhx/BBGJjV151dUxGTQxdNs27b7brtmR4UCXtG7sJtlyi01yqybAqcMO2Au9sh8J7RyR
+3RkueBhVC6KMnhX63VeuSV4GKPalrqn+CjMQx9ixU8b/2Wd24JRH+/W8WrUqXcjW4Yz272HD+A298FZ/0Kiw39y4FARlU6PFqRaNV284RJSKPFw3j2IogEIR
+S0wYsK2Cd8qDRLIYatrSFCtGK3OUqg4lVGURZvVsCnK/RIamgXNuKHlIpgNcuKvw0fT+B/FkacfygJYogz6bHBm4+xxhwpvv5G5A+NF/JTNyHfOetdKV+a+b
+Ky9W6nkVzpxdM+PC6YLYQrPFJgNi4XZSjnRkVtO9996Xd3vH5v/IIptlafz48Xkxl7LxIzGCDYYMSO9Om7QqL9+IP8qRlfdBnHvueemZZ1+MJ2x75XFU3z59
+Vt2v5cn7wgv/kO684/60TucOUeesvPujOlMt16NH93Cit9KwTbaJKftJuTUlv5qMxw477NA0aMim6eOZn6Q/XXJ5evSRu1atxVT9dQ3BLoz1ss2GDcyOoj6+
+qqPpfp/00xPTJptunZcILjj/3PSPK65Mhx16SA4iyr/zzjvp3tjNbuHb+IfBCxTg2CzKHtgQx9JT0dUG32K24Mv+2A37wT+90416DJ3dgcdG0A0+J1CHk4CN
+d0MJ+I1Xl0Q+2y6blOfnyTg40eRg6/DC37huFjU7RdCcRQHEumaQtbAKiFEHITZ71oiHEMD9MiRM2FHOGdVxIBQs46GMPBgSWZXBkCQSORfVLSZab2gdmzKr
+0nKhlX+qgZWdGGWhTzkLeroFWiNN8NyZFgRXd80qDC89MYaTCO+rUsWLjprwWRMZfTZvaizWrp0eefTpdMGFF0XrNCFmtl7PM4mjRu0eRdcJYayZNtq4LCJP
+mTK1Vl/1ywBssfmvFHjAr/huvvnmPMvVuXOndM3Vd6Ybb7w5F680Kivdd9/96fTTf5P69F0vzu9Ml1z65zCs1U67sli8kwLP7WOQ3jn97OSz099jecFs4ph4
+ZOPHJ5yYnWj9AUNj2nt63sQ7MZYJbKmSKi7nxtQ9unWIMa4F+0KD/EqXVurSoKFv396hy1Zpg422TEcecXh0LY9Nt97673TTzbekn//8lzFe9S6FsobJDozf
+4WHwbNKajXytB3tyj56rjZEhm1A+XDk7D/uUz/Y4AftTT52pU6dmp6vDFffqzKEH+sC2gfbdOJxLyjrgAYO9G6fFmlsZt/DcShQjN3EAMU+uAtFXdY4QjKjD
+aRBQnUFddTiI2Q+/ooCkPHzVIQmnNssijXpwKKffyvj9atUQrm7DxPgZsIF8EVaz3LKpXxRgIDg3NY+mumGkrTDwtzqtNoDVeavP4K/JlH1N1ciXxA7zvn16
+pvMu/Fs69Rc/j9vrpF/+8rgwlNtC8PGUbhitPXhW7G2KldRFp+RX4Knn5ST+hs8qB78lhP4xVSzQ7DBiWPo4pr3JHW/qV1o8TNh/4LD8qMSIHb8Wi8ExVo1F
+Vj2LUm4lzsxTaf1H7bFdOuaY72W022y3Y9orumuMnE7qgYfevXrlMtUmXORxSQTeWXNivBXbgMy0ki0jg0937vvfPy4dGi2QQP3BBx/mcq+88uqqiZl9v3Vg
+7mazJbbFNuClyw8//CgHdg5SW3tLNXTOqMmA7VYbYV9wkxOZgGkii5xcgymRhzqu/brvV0+GHeolgAEnG2ar+OGc4MPrOuPjTSojXkuBAcgIAiDEA6LrVZG4
+VpYwOUAVWGVUPiRaBoZg9g6j1Zit3RBSUWosd8ZsifEOHJgCB1wG2CwMz+Pa8qvBZClkQbSMfq6nd0WSMkFBYGgnJPTPi4euttt6w+zQtV79tZovikpw/r9S
+VWAus9qPVgUZ6z1jX5+Qvj5qRBjNDdFNG5IF/r9gFt5XO5Jy9v5J7kmhhv9KWuk3J01PvXr1yDLxuETDVB1Jnn19dp9oIbSmDe+tBL+yanmf3d13PZJO+MlP
+0xGHH5bWX79fNo6GsBueV9oz7JU0egFMp04d0t8vuzw/Acye6AFeduOXATryeCh2fVgi+MlPTkjPPvtcOve8P4RjCIjlvRycNkf6MNgaKI1xOQX7oWcTCeyC
+rTBmNinpXTF69sTZ2Cv9cTo0uSZrAVp9tsi2HMqzZ+XYsXto51BwlABSghY5KMd2GlfvBUCmpADmEYNIRCjHyZwrC7Dr+gtYdQ7E1cOGQAS6V5GCjRlCYcxV
+yGC5V2gyQ+SJ0DZp3LiJmZZMXPwpzJRmduONNkjPvTA2O5168BAYRYrCn8Rs2qbDNo4I0yFXh6sma1+T3iyD+2q89V79reV1EWuqefXa72uvvZF23mm7dPEf
+L4pB8xarnAg/X5W+CoaxnfS/aGFkXWNXg0khL1VsSFNDHFb3Z3zoYb2y88AOkv9ORQYcqmuPgfGA3/x0/vmj8ybYDTfcIMsfDf+Ljq+i/dP50aOIx3b/9tfr
+48nei3JgZCd0VX/JQoBlKxV2x44d8tLE3/92cejLU9Bla473ArIXemTYnEpLQX3y6Jhd0Xk9p3uHWWL5yknsCh3smd0pD5BALYhzOLDwVXpBpSVk31pPLT+a
+0aHM/PkLsn3Xa3VjYqX0JSuw6lCVeQRgGgGmtUX59dZbL78Jx85fwBwY0ApgHuEEJg8BYIHh2m/ZxlEilToIzi1jKJ4AJNFUXbu7X3zhiRiMvpvzqwKqge66
+69fSJ3PeybDhBs+qt6c/1Z0777NYYNw9aCizVfitMN6L2cZ773kqtV+n76q8jKTBHzAl9dX9cmKszZr3zpteTznlpNzyoTvTF7ySnxeuvBQvPrFP8O6778kv
+OQGn0lFhMpSG+V++r52yN09LNHPmrKz0Wve/fwud5GrSp8p0dZnS4uHpg/cmpt7r9UhHH31UNqZKO14ddlWMGfNSTHA8m+6//4HczQIHbQ3F8fEH72Zed95l
+q3Taab/IY+gbbrgxTZr0Zh7XgEsWjI69VBjk5Bg0aFA6+aQfpSefeDiMssywKaunorfBdqzdMGy2WAO6Fk6LBb58b5VVVhLM4ZT8KkMmbFM9j5iwVS2cfPVt
+MoazOrBuarvY7qS169OnT66HLjjsRaz8NGUoGIEEUEwC6FwyRgEU4ZpSZfVPlbNOQElgaLk4jHxlJQS7T+gQKmOHRMuW+uqrHytGuHJg124fOMrn1LhLeuaZ
+Z+Nx661WGXMV0JbRRTj3vPNjsHxS2uVro/LiL3p7xINlt95yQwxiT831wPmyIzz3/AupVbvWqWvnMvYryP77LyVJZILmLyd0LF08NW2//fdzlwWvaM9OEJb2
++ONPpJ+ceOrK54YWp6mT30h33nlXXrfIZRoArHJT/8tJWe8ucI8sjWG/XN/YLcw/Nq7GWHLp+5lmxuR9fV8uC36Vx6hRu+boTP50WPNNAhx02EmxITeeDo6u
+62OP3hcPSo6JWbvOWVdVBwEp9V1/YI7UHqWwYdcevgMOODSwLImtTgdE8O2Zd2hYr9k4Jl20RGgCA15p6NChqVvPQdHy2ARbuoL4FQx0a3Uli0OVRVR1TD4o
+oyHgDBKYYMsTnNShv2rHbL32mJTVZaxOZhOv2dzqhPKr3Qv21Ufq/Yqvqe6VGTjMcAjX5v89EiHitG3rmf+YCQsiEe2+iG9GDQMO/VX1EUUJDi2DX4x4k2mn
+eAFJxYMxhKhj1dqEBII5nhZPQguDUX9k7I6+4sp/pW/H4p8nY92rrRwcPzr+hzli/P7ci2M6d2quv0FsTD3vvAtitf97mcZKX/3lsFdeeW3aYbth6a47b8v4
+c8Uv/amzeRQRcfhLd+My8EsMo2FCl3TbbbenV15+On17/wPjYbwpCXWDBg3M95RpaOD4tVewpgrDtXO0oF90bBePazhnxKsS8uLSTGu89yg/K/TmvCmpd691
+c/1arsKtvwzpy4nBXHnlP1O/Xp1ik2u/NG3a9LTTyN0DVq9clK5XJ8EjJpHCZv58yUW5dXHvoNi1cebvzkl/uviiNGSDzdKtt92bPl0Qzz6t1TQ9/+wjeVq7
+8FAggRHE5zfc0jHdszt0MmbycXAIMmCL7tWD46CLbtHPRmpij+7Lh1P3TvAGg90J2vApBz59wyGPjcJb67rPXuEFC57GraIiQNWDFTI4LU7ROHe5bDBk+Jjj
+BJ5r4Z36k1oTgAwgOQrHAhzSOmXoeSOEaXbBNXMDlrx6oAEs1+7V6Xjl7QSfv2BhvATk+iwX+ODACLrRfGzMOD3z1IPRhXoqvfjiE+meu25OHogjGGXBUVYd
+yYzUC89PiPxyXfPzzQZ/0CKBUVND49cCSOhvmGqZI444NO37rf1DubPj2aaBuWXt27d0Jb+M03vi4mmk/3KuCpOPaHE6RDcDLhM4UTDn1jIVHvlsPHRg7jFk
+Bw8SKz21bPkttDd8WQsYyjKyn5/y09Snd/fcNR06dEg4xAU50Llfy4HjfNKEV9Luu+6cnch98rKw+rszT49Nsn+M9ZgWcd0hnvhNaccR22Sd1bqFFq/u+iS9
+/96kwF/sBwxLLb169cq/+GLQfvWK2BhZsE159RztumOV52prxkgcAFyOww7rPICyuntsiXNJ4MHHpjmV+9WBBH/8lb2KUUZTzCkMuhCgcm0xOAoAVoCtIBMY
+QMo71xWRjxGeizhIa1LGPUgxqi5HAR+BiK+Og1kMutbimTp3Xz44nh065ZSfxcr9BnmBU5574FaB2cXuaJjc40RgC9cc58knn4qW6hdp91HbxfM4xVGU+aqE
+ZqltPNtTE9wVp/Ge5ClavFIEXBXeRhttlK7951URWcubTHVT/lfy1qUYHWYH+D9lgg9p2rszQlfelxBTsf+nUMkgw7GvTojWW5cl1vFivPlVqe7asJt7j1jv
+qnz5lXbYYXjadNNNsk58zYJeGqZazm/neG+fXegNdUIO7OeEE36U9t9/v+wo5OLhSq1DlWGF+Xx0tXccuUeWI1kyVDDANE6yxiTfNTtjc2gynjax5LHz7Bgh
+ay/7XLoiglsc5MHp4KNPrREYtTUSbNkRmMpJrtl31b/fyg9+2S97bxZraDYrN+Y4WhKZiMwzGisBdevWPTsXHVZCOIGEKMoE3B4mTTDiIEAgxNVhOF6tr+vG
+YdXHYHUw9zGCKQJBB6MEEw7RaqeRu8bEwSGxs/mRrPTqRGA5lG14qCffLwVyIltehg/fNxkUG1iWcUn7XAZfyjb89V5wqfa/nVd8zhs1snWqbzwO8WhsA3pG
+Vk7oqLjx6+sMnEj+e/EELWW4vzrF66FC/p4ZslAsVfrzhT9RvG4ero9I577cygKVdg8QDonA431+dOBhwYaplrNlptna68VbYm+L3RZvZJrca3jQhzUVBstG
+TJygXZlKP5rW67FOuuXf90SLW2RQAwk7UE6AGzRoYGwx6p+dqOIAS9lXx45Nl19xbXxep204Q3kuiN0YThg6mBggOy0Reerq1YP92YWgPFweaFwSr0WT593t
+4OcAH/c4MOeGl+3jjw3o9SiHLmVy+RAae1ZOb0mq+WjS42LDjsYMGACAJb8Aa6E4Vm3mEO8asYycgtSTGKRyjG1evK2lejg4zqvAXSPMGz0JRB0EijZgYgI9
+8MAnUWKFIyLtPmqbNGrvI9Jf//r3DKMqAi0cq+Ehrx4UYlvK9ttvn0bsODSEUx4rnhUvR+nUpV0o8rWMT31JPfw88cRTsYVmq/TAw0/llXX34KzlJk6cGHxM
+jlm7HhF1j07PP/98rut+pU2dmm6++dZ4dqh0Ud13SB999GF6KHZGbDxsq5jpKlPyFYb7Iu2rY19PG26wfsbdqWP7NG78hMgvK+5kjGZKtaetS3zx4v33Z+T3
+/D37/NgsK3Ckagx2YKy91hqpezxpe/pvz8ovVIGzysxvTYzYg5UmT6RaxrmV/xeefyJtMnRwPMZ/dAS6h7MulYGr8ljtoNavMrSOdPyPTso7Gzhq1TkDrkYt
+n62wJ70A43YOIbEXAY+jsyF45ZGFPEmer4uA6YU47hmTsbeSik7BZKOVdrbZ0B61gnpYnJhMjMfYcpMBAwaP9hpajoFhrQBkKiMCUJ4HsHsQE4j8ipR3ugc4
+4binP1mZ4Agiky0ejNOgGTz5HKY6KcIkMMCrzSl6lJUvWm22yZB0+VU3p6efejKYMegr06V1vIM+9DO+yZMnR2txX/r9uRfGw3BnR3dlZHQJtJ4l8kTRmOHr
+HN2vu2LXwLrJ++TQpu4VV1wVW1suzrNNZsnmzJmVF1pzlA9FaIFOOOHUlY8M2MXeMva/XRr9+ZY5wlEUHsBitH/581/TBedfFlP50+IdDr3iratlkP9hyMWb
+ghaGbDp1ap9uuuXuNGhAn/ywHXmQ7+WXX5H+ef3t8XTsOqGD8gj325Onh0yWx9hrcJY5ed8cM233P/BozLJ5+1Os+4UcvVPi3cC54YYbZD1bq7GJ9gc/PDlt
+MLhPlvXEcN7b/n1n0B1rMRGR2QLaGe+rr45NF170h3TzLXflbTv9+/fNYyX6evvtyekPf7wkXsxSPuMD7+9+95fQ8+yVeinrgeA5GDojtsj++uuvp6tiP98J
+J41OA9bvFW8sja+BRHBlP1pA+uZIJcB6cWl5Fx/+awuF5+pQZMXQa4smP1BmeOyJDbENdMMBdrVXDl9bHfaoDLw1waOMg51Vh2Ur4DTyWRcCg0CharSUJ0Hk
+vkggcQQEcRT3lAdU3eo86iqj9SK06r1w1AEbxtRh8H6VI2jX6tb7GKr0MUj9ZjBEinfemZ5eG1seud5++M7RKvSOuX2tUJO8jvRhbO2fEI9bz4w9Wzvvsm1W
+BAcSDKpiCcI14Tz26Csxhb5lDLDXi2nel2M88nHacvMN86Mg0RGLB9/eDNwt0sgdt43JjwXplpsfCcccFq1piYyUY/Fz8uQZIafF8Uh7v+irR8sxbnx6+aV4
+J97m28XsZYf8vu4nHn8wfXPfb8fO9Z7RskzKhqOLgkcGMH7C22mLzTaKl1C2Tc8+Nya9NGZsGrnzdiHv8vgAXOT76CMPp+E77Bhjx0FZHi+MeT1ahoEhszJ+
+pB+yfWf6B/lFmd/+1p4xCzUzHsr7VzxisnfWKX0xOOOg++/TNZsdLf9eEeDWjE3BH8X3pj6NcVajCDh2FixJTz35cDriyO/l2d3/3PlQ6hsTEt5w68sVbMGW
+oGnvvJffJ9i8Vbe09x4jo5WJ97/FUooZRb2Wt9+eFu8RfDD16bdhtLIDsz51eWsLVO2L7iVwBXT32YbEnhbEjDB54TEui15TeSMQ+/xsYXn3vLIND/fIDzw2
+QE5etmmTqnJwkDEHQgP8AqiJmfLUdPENtpMddqutt1+hhWCwjCt7V/zWaWlG7Z5fCCQINGkIAFxTCwYnANhMCwJqBFKPYBCjbiY6ytZ8zinBDZaBOdgcrEYo
+9yodlTFwRCn19fc9W8PQzGwZv3lEwuZb99W1Z8vLMgwQ0SnBI4qByQDUmzPHZy1bRAvTOu7ZA1i21TM2OyTsSO/QocxQLoztSfgEhzIpCK/WyzzoVt7b93nA
+8uRxbAD+bElqF3U5pJk8Y5wWsSPAm0+9ow6/ZO0wW2q2cr11GWqr/OgKmcAhaeHt/MD7jBkfhtxjCj5mvGhJ94g+6EFramHRxMPcufHSyAgaPXt0CxmU7g+c
+ZNm9e/eAFdu5Yjy6NOTIqclA3XmxlcpYS5dfd9JDlJysezevBGiV68PFqNCu60zf6PXev48/nJ1mR+vWLt5V3nItL5DsHpzHv6DF4O/TkDu6G9pgXGaavc2o
+2gOa7NDu3LlLyG+mItnolcEHB8gPlMYEBH1ZvKbbaismx9igPAGkOiw4bBme6kBg0Sm+5Bt+wK+OXzap9XLeaOiwLfI3ZDGsgISgamgQmiAAVN9QsrXGuEhL
+Y4euRHgMFhzlEE7h1bkogDGDhzjnBE0oBoe6l5zAvdrVg7PWwRyaKkN+TV9K6Kj3LVqus/I7Re6LOvgCS9ONNzgkdShepCNENOrvahks/qGVExE+OpShaHxm
+eGFcIr9WEkw0gQdv5cNgl1Gt0SwG9muEDOMl+3j+dH75/lHLlmtnRVQ60VcDDBrhhdOBHnKFp8oPzZ4WFeO0hgIaOpVTBp0O12CU7mZ59xuc8tBM5s7pS1mP
+NdSuujy6RJdf9kDPegUCAfiMTT3Jm4IEEHKtEwHWHZ173g0Mzt8qvkEVb8HJsIrDl50I9KKMQOHpajO4cIJHl3CTQeG3aZYf3IIcGc38uLwvJOOJZ6T8MvYK
+lx2Bgz+68tLOL0LfZFNtDxzl8Yw/+iRPsiAn+OWBAXeTvv3WH61J5wgKYkg3DnKRup771YzOn2+ae1EG4D6i1K1O4QV+ALuX36kdzgKWPq8yCMMUQgjEFnye
+PTcmA+aFoKyVuIcJSXnn8BQcBRcclelaNtMTjBK4soSB2SoIBoaW6uzwMFyJcNBBOSI4fqshikTwo4VyJee+5mAcgrdKp19w0ZZDbJT1/rwu8apg+wrNPnIu
+kx8SWSjI0NBc6VscdKDNATf6HHhFl1QjZNFb+YIIujmGg17Ac1+3ynoeeVenMTuo9xC+kN/EAyZ85COf3JyDhSew8KwX4Bw/6HEP7c7J2Lm8InebQst2HLOJ
+kOklfBEPbGqh8Y5O+kRnhYcOsDgkeulSUoYxu1+cN7auBW8dwomyQUd5XyIBRxK0qm3jhQz9gknf4Nqho7w8NIOjZ0RWUTzbA37oFt/4Y2sCqrJgNBkyZKPR
+biBORo3ezgGHjMJEL01hJVB5RHJCCJSnxAoYUXmHbTAOuboMHFPguTbdiwlEwUN5yoKBYU5QFVuZJsgqcH1aeNWnEC0bXmzzkK8OwTh3VKNAr0OCA0+Uhi78
+5ygVecqg2S+4aKplfM0hKzobWDEytFZFWVT8PJThq9/V6Hz5gfH53I03vGZjj1YKbRlWtHxmEZUnP7JSBk0UCTY9uC+pJ48uRGwOJ8lTxsFR8QiWa3yQvetq
+UAxCa12jq3zRmVzJjxyV8etVxrqtgim46K66qS2cPLiMsapd5S+ehxOpK6GDngVO+tS10/KAYWHaxlM8sykwJNfoVhd8PQFy1lX3Si5dUu8/52D01irGruRo
+ls5LM8sXV8p70A0fvOu77rbJCLTqQR968Bek5USe5FDxuyYjtDnHf5Nu3XuOdoFAXTKCZxAKMSDCIlDEA+SwJiBff1pXQtdF9wcyCCjULwOtTLsHDyLBsIBm
+IU0+7/dLWcqL8HnGKWhSp8JDsIOg5CtnYoGxEaxyjI7A8IB+SpWvy8dpwZdPiZwCPGXR6hcNhItOsMjFUY0ZnT68ZXcyGAzLh52XfV6cPkf36L7g8fOch9bo
+hsS6RphClosuTlASgSSmYJtEfz26tO4brNvRrYsbJOYnjNGUDS4y0I0W4x8tGr7kVecmE/zi0T0y0fXzYkcwGCS9ue8az8pwIvzWpAz4xhpoW/BZeeGiCRm9
+Et169VuuFWsvYQMcmr3gmfXpDsOl96IsuuDUEhuX6TaTO2edNdMrq2OhN/jQQuEdXVpPep41e2bWMdjGVAIR5xPE2R/9m8Ej28aNS+tDvqX1ixYs9KRlgYPu
+ygfZyk5vfFpS4bjgsFnl0dE+1jKDpNy1RLsDH+pwdjInO3l+mwwesuHo6lkESXGSCgpVgWstquBFbIy5lwUaBleRUQplUzAYDJAywfJLwCYyasRVDgOEhg7n
+CKNY+BwEoL4EnnvwOXffOWWqB75z+RJ+ssAiMlFepQ04tIPrPrwOBohX+aKde4wBTHjAI+hKD8dzyIMTD+Aaa8Enj3EIOBRuYoHToVUgoTB8VDrgcE9iaGHL
+WdbkQw4OMhZELFy7pgt0w0U2tSwYzr0nHE1kg3+8OJTFn6N0UYu80cJQBAm7FSpfunRkZDcMw9OFUgZcLQCY8JhEoWO4BDnwtUj1wUWyp/fZs3w1r+jfs0Ho
+gRsv4Hxm3BxByURMluPS0r3Duy54CRreoOqtwPbele4wmZCdXz0m5eCT2J1rMnbIR7dE9mjAI/xVx+Gj2YmVJWt6dQ/fVVdNBg4cMtqF9RgMKIAAzIt8zitA
+nlgVpk5FiAhltFwE57y2LroLCKDQOi5w30qy+u75ZaDVoDhq25hBwzDGwGSgxWBLS0Cw6mGKcAgBk8rUVAUFrvOsnJWtq0E+5cuvDuk+XHiUCIpBoVc0NrGC
+HjNOC6NFVVfrhTZ8kFWFV4OH1ghd+CVTkRJM9PtOrTUP9eBChwQWmHZQ60LqworkFM448YtOdegEfPKr8oSHvOmjBiy8qiupJ+lJSPJNT6OJvtwHS11fw/CO
+QnxpBdDGkRivBU4GjS740USG7EbCpwSeaWN1Jc4DHrkKLuQNFzmoT8Zaa91fOLt27ZZp1Jpo/SufelCCEbvSqhT52zIULU7oi/0IYPCa8ofDwb7wDD9Z6RbD
+yYbARpt89Tg4+sCGx33BQ88NrWBk++rRc73RIpwmk0AoYO0QHOVIgKngGvCaR3CIMWXMGCCATBdKOcgJVF3wwUYggh2E4FodDCCm4mKcmvbKGJwMxhqE8nBh
+wH39ffAluBgDuGhzTrlRNMrNy/eVcR8MggZDWfxVntHiHJ3KK6ubgi/34AajCjsLPMr6rcYNprrkAA651qlYdY0BoniGozuIL2XIFfwi79IVg4eBmxxgiq6V
+lZQlW7zUbrlr+OGp/Fae0eMc3+TsnRVwyxOEGD960aElAN/snICly1Z5zl36oENggAN+sOlJHQZLN2gxlilGzchLKwWPc9t4tDCu4SRfQQRNYHAKegSTA6Lb
+UIBe1Zk5c1Z2DF8eN6mBjyiaFsdH0+ijLs7WCRI00hG6HFX/gg4eq65cV1jo09oJjmjxsCr8dCJASU3ad+g0mjMgHCCICAZDKlXEujmQYEg5QsColqxGMEL2
+cWBlwEMkhYOjDMbBVpehYFRZ91zDpQ4G3KMUA1YCMltGGZQND1qUVwPigIcAAEAASURBVA8M5eGpExbyCKPcL04hj5I4BiGC4bziRjM4DsqKnxzJRF00GRui
+3bl6eIOj8skQ5OMx447WSGQETz7c8HrosHngMgbBo7LKkANnyLIL2oKxLGvwlcG7X3ygvfKOb9dgUy6Dd0+9HIACNl3Br35WfPCAXvISoUVsdEv0K5Gzbike
+wNOCqWNWk97JB81wOOAjH3qGDz0CJprQXA+81kCgJQETTnLFC1jZYYMGRuwcvFlBp0kF/FkTIytrbyYbjIFMENAbWuN/1I2JiZALvsgHfegFj4PByabkuY8/
+sq+yxYvgLXgF+Vl+YHEeOExWuJdp32ijYaPzYDiAKYQZrz8qxlTGHioRAsCMl5O5X4RaFlYrsxh2DhaBSIgjUDM2lUhCAJeDilhVsFWJ4DhEPgqpNDBIjBMA
+uK7BFiHkMRR01TrV8Ny3VqVvz3gITfcOnQSMP7+itPqacfhtl6nO41eUhV95+7vyYDZoMMCFXxm0OldGywqmrqEPjenzk4FZpEAXNKyVaYeTbHUbReG5MQMF
+ppelKM849ftrt4hc4aBJ3T84GWFJxfDpiIzwAT4Zkxc8aJPQhW8BCzC8SVVPljDKNiyL9B9l56EjMFvFuMQEgq4Rm0ADOasrwQEvw6tyq3aDVvfYirU4Y0pJ
+XbZBf+TrlW5Izbpp2z4HE/ySA7haB/fYCVhgcrI8vg3dLo6dGOwQvWwbneiHn4OTDzrRxfHYg+6jaf6s91iHAtNYi/zy/YBFTq45Oltp0rlzt9GIdwPwKnTE
+QQwJhbkPCOB+ax0MI4YwRQXdgEq4rh3F1Zbo41go40xgY8KB+Sp4ZR1wKAMPuPIwI195+NCjDKEY2KKDAiqdlCoaedoRDxSsXw8W+tR1qINnNEpV0fLcJwM4
+JTCr0Ctu9dBFkfhQFl/wmYo1CUEmCxbE91UX6T4Y9wlQJXpXeOjw6L7dA3AycF0+v1Xu8sFFB524xo9ruHVf6bDqSH1dbQ9n4oeMaiuuPl61LmgoMiifiZQv
+uOl2kTuDVqYGLN/TXRYD/8JHacXIp+pRADBUYGgMV100osuvlrHK1s4QPNCjfGXJDz0Wdn1vChz1zLCRRR0nkYV75GCs9Hl8awqO2mLl1iry5SmDrxpY67DD
+Dh10o6faGRmRI3kJdO7hPUjLeuY86ISHgynfZMDAwaOdAIYBBKsoVSIZrqlGjOheuI8hjBMUpcjznU4CwSDC5VEEoorSy769KiwGLaooBy9GEa88PH6r4TPQ
+6iTGcMopz0gpzaJnpSMbcdDvAcUKD6w6loIfPPUpDH/oJgdl/LqPropX90FCp/uS33roqonolCbCUpD9eAxZPnnhZUnANS4hO2sW6qNHFPWaXWsfeX0tIm0N
+FlXR1dnBQptukTzllMG3fOf4Fa3zOChoQk/lB69krwy9oMVWILRLZAAH2XEWDklOymrR6YxzuU//YKHB/WLIZRbXPbCrXvGZ6Qq5Oyd3s23ga4WVc1F161eA
+QbeFbOtE6kTBLDvl60ttvOqMYWvZvDTSBAk+BQM6I3v2UZZVSutFt5zSfWXtwC/6W/2eRduh2GKVsbKckd4ERQ4Gd5M+fdcfjTkFJBUAA7jMbhQGRS6IAUUU
+hjBIABhWD3DnfinVuToVnvIURIjZqKKOyMeB3UMHhanHiU2ZLojtSLPnzAuDKU2te9UhDL4xY+xScYmGBpZwcGL0ogfeHF3DyPDmcA2fbgfjkLSY6EWP7o4A
+ousbf3I3zhQu4yiCLFFUqzFh4pT06ivPpS/iy+geYQC7DNJ1T8uWGNPV5Pzw4y+nt9/5MPXq2TlaoegCRn3y1B1Rh0FaK7FLpFMMcJ0X5ZUuK0fBb7OmMUER
+eBrFNhvyzw/wRSuo22Nig0pNkhgoO9e1RRcZwUcGtUXlyN7f/VmMM9rF/j3l6BwtWlswc4sX52bPOJQAhl56Iy8HvdO5eoUnzlGCjvvkRgbFqWI8GWXlcyY4
+TXnPmzcnf5LStSDkPj11Xqdzthuv6qJPupkfYx280znnZvie1WI/Agka6A+MEpzLliI0uGfBHA/4F8TQ7fOh6Cl6K/onczTUljzDD0fGa6ald59+oxVi2DIo
+iIBdMxjnHMq9Qnx5KR/CMMCoASVA0Y9iIOSpYJQoU56BEbl01WqXj4ARhhgH5uADk5AWL16WZnzg0YUB6aPoFr4xYUp+CSM64LB9hKApEg8hufgY8iex1UjX
+zTikTCqgFR5l0UlYxaDKY8XZKFc5TxkToY3BUQLajIU0495MRCHGDehgZG++PT0dfuj+8T2iE+NxgD7ptjseCAfwBtPumS7Gwzk5wyNPjku/OfW4dNTh34n6
+S9M119yS+q2/XsaDLokc4Kxf4StjgqIjfMMbpYpzhZPofljgDrsJvksXq1mzJrG7+pF4bVf3CFYd42uHD8Qm3t5hjJ3y9HXtPnFe7wW8/74xabvth8W6Vpv0
+wCMvBf2tY1+gKfYyY8vw4a700S06BRH5jNJ1Xb+hd2WKkZcF2hrQ5Bv7+ESoVpkTaCmyDgOB++Tl9QV0UBwKX6WHYOkBXPZENqyejsFnf3CTEcdQjsOjr8Kp
+vLvWa1C34sYn3GyCvZQF8rIGB6Zy9K8c2GzfeaOdRu62AgEVgILWPERPUVolFQgJQkgk1+rwZgaWgcU1Y9Ac1zKlfF03KE1sNXwOox4H5KyVhshKTz4zNv3k
++MPjZYvH5BYL09fH651+PfqCtOnQAbmLBI46DM6ugIcefCU+tLV5fun+008/n8a8ODEeFdguK0BZ/FA6BfjFA6FrhbRsNuOaISwGYKaoTK7YTqIbhU/RSuuo
+nunQKVOnx2dJzokvUWyXnx71IpGJEyelgw85KhtlMagmwV+8OCZ2L9x0w9X5cXldEHzffvud6ccn/jKesRqcFY4WrQi+TIbQhy4LA/00d6nCSCJ6Gn+ZyMh9
++DCm7EhB44rIbxX0PfjAY+mPF5+TDj7owBycpk+fHs8UXRyPh7wSj250z/Qw0sVBU48e3dNpp/4svxiS3t566+34ftJp0S3iRD6sRS5lX5pAhCc2oTv9xRda
+hrL2Qp9sBC2Mnu7JmT0wcHLX3Yvq+VGM7YaPTOPfmJLmzpoaj5TsH4P2udkZ6AoOXXi6MFMn5S58BEQtD1zuCdjsl5wcAhCcDjCMwdCBLrgFM05C945Fi8sY
+y1Q/3VabUJ8OwGTTcOEDTMHeAZbkvMk6nbuOdqKgyjyX51l8BChHxriHCNc8kEBEeIAZA+QG/PIrkYQBZiFCi7F6cA85x8AcmGY9GITIwmA+/nhO8tzMmWee
+Hu8x+yAdecQRObofcMB3otFZkh57/NnUr2+vzBSnf2/GByGEBemmG/+eX3iy88iR6bsHH5h69e6Sv5LQvVunTBsjEBjwqLVFP9oNxku0Ed3KtK9Ibdqb4LQs
+XnLhc50dO3bIgtOlfeixMemc+B7S1/fcIx5m+1286PDr+d3YRx55RLSig9Kvf3VqPNvUL2B/np579on4fOcD+WsRJ5xwQrxl9FupV69e6aAw9AUL5qX/3P5A
+6hXvlyuRMF7c2LVL7nvrUul+0AN5iZKc2oDX9Dn5ruldd3FfC23C4sEHXo5XYp2dfvCD4/ITuxdeeGHabbfd8nsZ7r773pDpR2HYYVQRILTe1137j/x2Jjzc
+fPNN8bamb8eTxNulS/78j9yC6R3UaEw+aEQL2fmlS91jBk22WnH6r60/mYPBZjigOjffdEP+muLxPzg6Xs81LF1w8ZVp/b49Qt7FNtgFGMY7z784Lo0f92I8
+ibss3k0+KfVct1tQUZ5PEnTAU1Yij0pf1nMEI8n9Yr/lKQO2zkl06fQuyE89vw7OB5Zz5SoMPMAnH4zqpNFzXt26cIQ6TakA5LW7xoEIhEMNHDgwA2GM8kRX
+ZQmuGqn6PLYQWyYGasQCQz7G1CnP1JR3kDOcZdGNOvmkn2QnGjBgQLxK9yfxlbnd09ixY/PrtSZNfCVeIH9dfo3WzfGAWs+e3dKD99+eX9Zx66235N9HH30k
+HRUGfeSh30oz4it46KFEjiFVYaEb34RDSIJK7XqWBb4yQyNfN0JwIAt09u/bLX/E7LHHHs0vfwSbc/zlL3+Jz5tsm86PN44uDV6eiDHRtddel59QPe644yLy
+r5+N78gjj4z3N7yXvvH1PdOH78fYJZyTUTJQ6xzkxZm1ZsYrzhk0nZj8+Cy6Q/LQgz+y/OKLFfElvOHpqKOOiKdgn4qv5u2YH6/XUqL9oovOjXHVkpDJzPTc
+M4+n6/55WR5j0psXf2699TbpiAhcvvJ++GH7xxuZxkfUL7NoZEVOElhaeDpmM2iuwZVNoIeOycRyA/qdP//cy/EBgMvzS1XOP//8eOnk/fH+u/3T1ZdfEC/r
+vCPKtg8dmEW1nNEiPiN6TzrmqAPi8fk3ws5ejXdLPBO09co8CGqMnW44cZ1QQRt+yEqwLC1HeeoATWijf3XYsAMcMtKd7Bw79dmmeupLeOJoYKpLFmA55DUa
+vsPOKxp6MYFQJmJ4nAoQA6IcYSijxVLGPeUYqcF53M5lKyNmrWwlgVA98AhcF2bSpMn5ZYP9+/eLSP5eTHe2TBPfnBLjje/EeOOEMIajciQVvW+77bb8Yo3z
+zjsvdz1ef31cGNiszLCWCh39+w/In1M89NBD88sGfTnBm0IZ9V57fyt3HQhdYhBoZATq4s2BJ/wTDoVymFIn+sQxA5QH1/FskY9zHbD/PvmrD5zjkEMOCSPc
+Ot6iOiX98Ic/jMfb78l4yEWLpJvy+uuv53Ivv/xypvuMM87ITj9w4OD8oNrOu+wWsoO3vKHWoJ68m0QrTemMiwzp4sNoVXS/rTOtWFGmbyn/kYfvj3dC3JCN
+0/Wrr76aX8j405/+NPnc5EEHHZTrMxB6owuOP378+Phk5yUZD8PXFXzxxTHpa1/bJb7wNypkV3aP0KNuGjnpYmkZyc8ak5bIOcfu0aNHhq+ngmYtuE+YXhbv
+Bj/qqCPjW1J75Rb82XiDKxz77/+d+HTp3umeu28PucW2q7bxTrl570bA1ELuFwFnenxz99YIXN/Ige7Agw4NPIKONaDSvaIzTkHmNSCZQbXrHl14pXcyro/7
+eFyFbtSpgYJdVAfiTK7xRF7w0Q85kEH1iaYQY1RBvwBSgIIOAlGBQHR3TEMqa3wAiKjBUDDB6KrnglGMaFlqG4QY4LnO8KN78v4HH4eCRuT3Fjz2+NNRb2Ee
+yI977fn40sJfsoH/4x//iJc8npcNkhH4rAncXhLZo0f3zBg8hNO1S9c0+vTRiVFLBC5ymAb1VfIqSDxK4DBIi4paGvfBko9fCf94k/IqeeCx1R+ItyaNjUfJ
+f51hTJgwIUdz5XQD0XPjjTdm/GD60PFmm22Wneu3v/1txqOs/j1jMFbxQWIOu3BhTJhEV5deyKzMyplNLVPt4Okmdw/+Q4zZkO19Wx7OZJtMfGkpbbXVlvld
+FZtvvvmqlzVqZX7961/n3oSgZVzTp0+fbFyXXnppfP7lPiRl2nw5nh7teCl5ZZ3Kfj+9h2qQy5eWLhU7IB8Gh3eGxpDJn82suWb5hvBmWwxPe+65Z25d8ChQ
+jho1Kj730i+6uvvFezOuzO/B8KKTt9+eHI41Ku0QXx8URH1v97LLLouyfXMP4vvHHZOOPvbENGRQ7wio5cMJZFPssPQc8qMTjaMFal82UZcJovLoCT2jMTrK
+OajTPztYHJM3aBbAOFYNGuCWgNAs75Esj6SvXiJqWjyOELxjwZaaMjUt0jAiDgSYZpyBOa/jIsTYWoJAA3FCRBxBV0IogvNohmkeQbfcckN8gfz8tPfe38h5
+3//+sfGV7IfiBR9X51fW9o3X2oqQP4kuHVySSO8FkYSsKwY+Jz788CPSgw8+mE6MD1lVJ9Jdui8+8NX1pq7RCoyL2rMy/QyQcBiJPWIU7yADvDinDJFHRCI4
+60Nmdlq3LtHHGMTTrhKnUU6Xqlu3btnxrrnmmtxd8U0ghkomjPT444/PLdK0adNyXX/IigLxMnnaxzHuWzc7CfzoMKPFGLX0Fhy1PKI+fCYann9+fPps0bK0
+9VYDwjHKTF///j1zK3/vvffGZ1QOzfDheeSRR9K///3vLE/GiK4bbrghupzXhgwPzwGzEkZfNdLKM91M723bNs7vi/PtIDDpffbssj+PnvBRHElAKOuNxp+S
+1wD4cnmnTh0joNwd7xo/Oudr/YwTPZHbu3fv5POb7KcGPDTvu+++uZsr8Dz66KO5N9StW5f04YxJ4Ui9cosCmLdTsVEyZcfkZ1LGJFIILtteeQzdpgKbVdvH
+DhKv+ipbheoEhplYs4jszD1l8YU/srENzjII3UjW/TwIkoWiAAWqEDhzJYBEDorDBEAcyhS2roZ7GHbfTI37xhciOSSYMong+f5/XXd16rne4HjjTb908HcP
+i2/xHBUOdXNEyEFZEF4K8qPjj4sPdV2cI7E+cf/+/fOHrwyWn4n3pT3wwAM5j1LRaabv1ltvzUbws5/9LDPlz5NPPplO/tnJ2TEeffSx+IJD/5WOUbpvnBmt
+nm/R5ZBcF94bZWflcORhVsszQ5yNsSoXSyg5aZXxyZlE9XPPPTdPIuirk1HDpGullSKTmtTTXSOvLh3LU50CGKdGF2fKkwohYwumxk2S+wvjsy1/uuSM3Ipf
+dvlV2XAtEYzYceusfF0zNF999dXpsMMOi0mYk/J4jPM3TL/4xS/yPkY0cB6yMeYtvJftQtF5WrkT2xYpzw/FwmvszF68uAzCXbMZtmGmEf+MWauq1deaPvzI
+K+lXpx2b677yyit5HIYOMhesN91003CiEVmW6P7BD36QW3JOBB77Y48mn9iYb0VJpadQ4HToUNbv6CTrKeRGhrqgElnLr8kaJb7xzJadC6b4wJND0sBYRFcX
+PGNJdm94I4Co17QaJUeiZEqyQEjJjImxQASoe4CJ6BByIIJSFgLX1QABh2xqvDN66y03TX++9KIccTT7ZV9Xiu7O5nlB9rTTTosW6vd5ULrFFptl4uEyu/Wd
+73wnxj37xKdCLsgTHfnmyj+MlSE0TN6x5tP1finghpvvSgMH9A6jspLfLDsfJWuBjXfwIgLiEc0ikIT2OTHI19Li2St3RWHjJt3bw484Kg/Ix417Pd5990Qe
++3Bgg1WJgVSlkQ05Mo6axyjOOuusmN7/fjbavn26ZwVyHMnYjazImUxFyACTeRgf36t98L5bcz0R9fLL/py+tute8aaiielf116R69OJMc8f//jH3LoPHjw4
+5/uDNgld8hve0zJ4bEG3kjzW67NxuFHpWqpHTsU5ypIIGHRVZUd+7itLtn7JLC3/qEyvBzABEF+6xMccc0yeDDFeNIzAK95NQuy0007xCuqXVgUltNEdRxof
+gbZzt4E5WOhp0I26DolO2azWCI30yUHZjHMTXpxHebaPTvavgXAukJABXuhM61vhus9Z20Q31z14YrJh5ArA3CQkBEFSjKZswlwNrGwjIgjEcBxEIBCxdbyF
+WYS88cakNHKn7fPnGkUykYiCHeBryocOHZqVYIzEKPfbb798H8P/+c9/8qA0cxB/0NgwYQIdcFMkhnRVvvvd72YGp0U3auhmI+Pbpv0zPfWZntoqWI8x80Xo
+BMgIqmBNwzdrFm/2iXUILZGy+s5W/XVNnn76yRiLbBGBoHPuw3vxZE3oRFfDJA+dUqX3lFNOydFNt+/iP12a/nTpP/JLFsm3OHt5GJGhkrEF57HjpqSzzjgl
+ukZHposvvjh3F40d3nrrrcyj8djtt98e3ea9cyD6zW9+kw2CrCpd8Luu9Lz44ovxsa9nsyHpEt511105+qOjV69h8RqwLUNfZUcC+ahbf7U2Fi0ldoJvumBo
+nCUH3GiZXhk7IT30wG2Z34r3gAMOSGYuR8ZyxVclk0V2vsAlCbg77LBDTE58LX33kMPT08+8EO9qsPBfnlsKsjIPNTiik17RxKbhJVfylNhulYt857UVxUMN
+Gnbre4tTvac++1aHHUtNuvdYdzQD8RqqihBABY1BFK4RBDESQpWt4xeGyakIjTApfWkIeMyYZ2Jcc12+FnkHDRqUWx3di169euUB8R133JH7wE888XgWGuei
+/MOiO2KGqTJKCF8+0CBPmXpvo402yq8E1tpsu+12+WPDb0x8KyJR2XVRadRa6Ue3bFkeuKuCBlMZXVJ7qFa1VCEPfeWZM2elDQPHz352UnZ0vBhj1GCDjoYJ
+bRK4UuWH04+Irsy2226bW7MNN9wgnXXuX/JaCoMXdeu4R+vFYMF+I9ZTRG9T28YXe+yxR3z79e/ZcQQCs4e6SiZpwNBl2mqrrfKUe6WlOhSaR48enfkQ/W21
+wg9H9LvLLrvEWtOcMNiXwgF6ZPrVqYYNTpGnKeEyGCdHePDHhsj48adeTeeefVpuefQ8evfuk+644/Ystz59+mS4YNVU6dRbqjpmg9bpTNawu5tuvDk+ED07
+d3kFYbLS4+A4cKOxypxTS37J0L0arNBYg7BfMmTfunc1v120TGiRKj1otBGAf+SGZPjwnVYYOFXh8jqFeKsBrnyKBKAaIYKUcw0p4ZqD553u6br5Up7HBv59
+643J7BuYFKXezjvvnIny50c/+lHuv9cukckEfVADv+q46jqqYGrlyqh7BIQOTj0tWiIOyQkeeODBMLZRMXbYJUcuMHXzjDe0ehLDq44kIGgxCcjA1IzTvOg+
+6XLp3t19173pnN+fkU6JMdj++++fDVGA+P9LVb61XKWV0U6cODE+EHBKOvjgQ9Nr4+Ll9z26ZV4Yp3L4o2BG+cD9d+dpYDxdfvnlWea77rprzCBulE499dQ8
+rqiTLnCR54YbbhhT2S/myY+K369pb1H/zDPPbJidP1LMwBmT5YOhQ0ekb+y1U+Av0/CMtU4I6B6xA/xxXPSK4PUjXHSGj7vv+ncE1hdjAunEvEjM2KUqFzxK
+DQNRw3vydffOOOPMeH/7k/mD07vvvlvQtW/YahmnNmwltBroYY/st9ILD3rlV7o5Ar3DgV42jT68VLsAr9qfuuyMTvgJu29sQ6QCWiUVFWKAksoiN8NCgF95
+1TvVgdB17a65hmT+pwvy6r5zcMeNG5cZ071DqASm6EJgNemiaL0wQACUVhnkOIxeNJHgAqMKHy6pV7R21p4mTZqUBg8pYwODVc7BOAwei0OVxUT14SF4ONwD
+m5C0QnAQtuvoEOTntZyDpevhfd933nlnxu2Pvr+ZOq2tLhOZViUwXEeldYMNNsgLzXBsPHTj/HZXNBRFlxevkA9lzZw5K22z7Yh09jkXZplts802efKFE5kg
+8FWJw6Ilr8nkBp7Nzt19991Zl+gylrOWpetcJ2ngkwzmBTJdZF08C7Mjdtwi5FL2VlY5oJGdMED6leiVfXiGyz37He9/6KW037f2zkb3m9+MzpMfZKtO1R3+
+6MAhr9oDmTmv1wLGoEED87vFhw/fPg0cvEnoxZPG5U1EVaZg02e1W/nOBXEOVWgr9u4evLXVV1c5B7zFXspTCspq+SpcjkgOZNBY35cArAEROkGIKAA6VFaQ
+swDulzDdUxcgCA0Qq3AxN/Wd97PwEAmx9QORV1eDoUruqUMQNRGA1k0khcu2FfAZqk2h//rXv2J1/qI8fWomDwyGqTvofdY1DRs2LC/itQkYPWK2MPMWxqil
+M4tTjZswtEz9+vXLfMIlspmM8Gy+yMrZe8TalS3/bTv1zY9GULLujwCh62TdymBY6/vPf/4zy4jBCAym8QUAcjagdkydOjWTqptRFWsj54KFZXc8OqphwgWW
+X4PeV157M97b/eOQW4qJgiGZF46NHnxKHJgjCyZ6Alp+snOfjK+44orUKwIO3dKBfEbzpz/9KeuSc7755ps5v2fP7und6e9lZxY4yJydFDmVBwfRK8+9zz8v
+j3m4nz5/Pxu/lksZO1WkasDK44sM6KReKytV3uu1aXOtOIcYtdvINO6Nt7NM0K8sGsCiU7DYlpaDzYHPbum/3hOg6JkcjLXYQ4WFfsMbcHuEbMHS+tEZ+vEk
+QIPbVCU3skOsNGpTfTzUmgXADBpxiMAQIA7XGOJY1SAoXEu1eEmUXekgHMm4wjafLycMNUy6ZBZetVwSHGeffXY2hoMPPjjPQFkVJygzPpnuoEnf2YFuDo/e
+gr5R+mR+2Qtmvp9w8INeAsI/XjgYPpwLCvh66eW30tzZbwUVPeKLFL1iMbBnmjdzRsapvlkdj8DfdNNN2dkeirUw/P/qV7/K4z5KN2YZMmRIMoX/y1/+MrcA
+psnRJ3EwvKjnUeouHa3Ml3cfMAD8oYmjOJd2HL5pzErOSKee9tvo6t0eL7e/NWY298rOAx7HIXMTNwIEw+NEtgtNi26vxW3jH0EN3wIF3k1akJ1WySGg0Y/d
+AR2DRu9V54RkU42WcbEJB2PEB5tZvnzOKh7JXD3dd/rBP8MmD3i0nPhjkA6zrrrLYPqyBR3VyRw0mXZmF11jLenjD95OjeMTNp7zIid04cGvJ73pSVDGI7qq
+zaIH7a7VE7ToEy76b5jUsyZVeyfuqUfG6Oa8sc+xNLNu5jejBnDAHJhFCGFCRrHKV4KVyR4dZXgmQTIQTPRerwuQOanjTThSNYZ8EX8I68vOJJoahDMKK9+E
+qxtD6YfHwF6U171jnJxu/fXXz4ZiYZYQMfnkk09lIyoBIQa+sbsXfZyIYBy2GFm8VYZQ8YV+PE97Z0Z8veIX0U2bGNts7o4tN/vEZ1XeCIoXZ3zK2Qa0RXzB
+nMGilwLMKnFoA2NOdvLJJ2dHYtCuDbbxZY1Muueee1cFmClTpobB2n7DyT3qEe+5iOl3yxFkZOxpO4t7nmPyDu3dR+0dC6135GdvLBFYryIb3SAzd7p7ZITO
+p+LbULYxCVKWB8hSyySRm8Bkit65nRA77zwyO8DkydNCJuWhRIbPeOi+7pEkS3U4Ix2RZwlwxfF9+pTc8aDbe84552SafJwazZwGfVp1ux3Ib8yYMbkraoZu
+REzKWFeU4GZfDm84ionurC+zajkvbI1NsQHOgl6/1eAtFJMPu1VOoku2jT48KK91rj0CeYKJbUWcja3jT4AwHMBbU0YKAEG4yaAAIhwGpW/s7Z/tw6OthRAU
+xLxQOeVFVIQ7RCsP3OkfE6iESHhqUgdOv5ghgJr05XWFECzpy9vqAq6x07SIqNaO7GYW7SmUo5pKrcli5MMPP5T7+r6qHWjyk5CzZ7yfnQWdcBIOHsFwHSTF
++RoxGzQ3XX/dP2KP2kZZoW3atI5NtCfGdpXt05ZbbhFOsH5uGTlNr+geSWhAmxaV84PLcUR9AtfFki+Zbat844uSJTL5POTtVwSt0/XKRlbA9DhA0Q+e8hOy
+bdZO1//r3/HpzyNzK3PN1ddkPZlSJ3/dadtrJDolT84F77HHHpvpdM2A0TV58uR4Ruqa3F22WdaakhnQTz8tH0Kga10auluxonRryE8QscWmTRgb+YrW68Qu
+BsnDd+yLcZIvOeHxyiuvzAHHrhCBxeSJoGTGUcA5/fTTcxmblbWy0vjx4/PeQfqa9s67qWuP7vmNvmvExBiYjJ4uNAJsFS1SmaUtG2urPXIUwVN59mycqbxr
+sjJzK2j9f63dCRzfc/048M8uzTGMOWabNnPfx4hcQ46fqBwpV78lqfyIXyLSsX45chP1JzkKueZIxJgZRY5KjhLDxgxDOSbn2P/1fH322r6tMdu8H4/P9/h8
+3sfr/brfr/fxUS+/CT3xPt50jwDJbyd2ChJtY3kF5LoQERGMdTAEgkHMW2/FvEs0DhAAyEtAAABwgghRuc03mMeOSoRnCYpZEKASYKT69pu2NBFbyWShKJQx
+EkRbImK222XQrCN1WIu6Idhg2ZgMU+hL/76LJaLAjKm1J28hG4EhzkuoLay85JLLUoiGxcCd9bDw0xjj8MMPjzofz7GIezWR2fZ7vgxwiJyB3/ow44m11lor
+Vz3Ii2FaoWjb18fttts2tbKtC8sNGtj8+d6/Jny27YOPe90jLNIb0U9jj549rXSOxbOhUS3hendqLM/q3SeE44F4E+GmeQ62CWICDTcDQ9CturBgloWSjB8l
+81AjRoxIC2uFCGaCSxO5lJQoGUux5pqr5T4mdMdIBIeC1ZdSqvAHr/7jGWv5HIwiLRtnPMDx+BCg5ZYbnFqcEmYV4IfVtoSJkJ955pnp/lbkFg3gF3OjGR4w
+ka3sqFFjmpVXXC7ut0EvtNc2OPAlehNgQu4ZGC2i1k98rL6k+7RybZ42WotPKNopU2acJ4iPlYFXSSyB0dDnaMvaJLsb6+jXVvIApEEVAkZmwkBKNaBj8kAq
+oAAhj7Ay4JwoOvqW30eZ9q0V/FSTr9qrJD+BrXsApFEIqASBJhi5K1Y3CDdzDcx5IDYX0OBY0kEwlKYXADD432CDjwW8xiOt0GoDg+qL/GmWAybl3w7CLzto
+jah/aK5WIETcN2U2i8WTNLYJREn/4UcqwRwaLoi+0KgYWHmCxe0Df+WV3yUhTll2ms480ZJ9tk7c2joeYIV/LnLZLiju2dPKkNilG0oOM8P15NgVbD2g5N1D
+rIaARqX99tsvxz8idDX2wLTGTBSSJLjQmbgvaAPORReNQFPgzBhBgKneKYyZ0Ugf4AOeWCv4deAkBg8bH+H8AemacTe5QvLpg7JwZDEtq2MMxSJRzOCzil5C
+J/QBi/Gd59x5i2rhGw7woHoJUCdd9aG17u38En6tMvCOh/GClf14QRvVFzBql2UDq8tEvfzaoli0q57uMqvYhTEAoTJMjiB+KwQ5ChG6119/OTvozC+n3gBA
+I5JGXosJsqWXXip2QY7OxaZmrnfZZdf0y2mHFqDuGVUSgCizDQZtArwzIbhojygdYTSuETZmqXS6hEcZ8EruCVWGiDXjJtiuvmIimaAWsvVJ/8FcLkG/vn0S
+QcZhe+/9hawLXAQKQWhzxGRdaMcSYBn9tljV6mkDYv2k2SG8nhd88kqYgoC6/8wzzzY95g8XJroALvcwoykKXkP0Ku7pWyiN6Detry8rrfLRZsKTE7K+wcsN
+zi0QLCC6wQ9m4RZzTYwZ3DM9QFgkcGoL7dFdfgKB6eFx0qTns51FF+kVfVkgztCIYEws+yFU2rewV1v4B07hPNemxa81YtOeyJd2Mb9UOGCJDjrooHR5uaDG
+wWPGjMkJZwoSDoq2YAS3oINlQ0NDadkWf+99f21WSqs0I2yOTmgGx+ChmEvpo7NzHfCxe/rreWtlW1e/8EBR6Jc8YMYnknIusBFiqbsjj+oFUswvX7IsEKTp
+AGJ2VsYFUqmOLhoWSkgQ87NW3BlaQIhxmf4rNmeedXa8Vv7UiMxcku4GRBEcgEOccQZhss1AMh7imrAohXAAY1zEnzkVout+leFL77zzTgFLDBRfeyn7oH+I
+igEtYISksqDKQ/CLL7VWhnUw1pIwi//mi4x51EODUyzcR4GEggO+IJklreQZuAo29+u3iBpFQ6td9evrmlVX6h+4NtZsw++WKAmUaIsFQmRr1wy0tSUtsvCC
+zWVXXBeW8EfBYJvnnBHtj5mkElpum6tSwYVBK4FdMvck+GDFi82Tm262ZYhHq6BMCVhviEn14x8hdHhGO3CKtq8ED4D1bw8/Fbwg0jsDXmW0jVdMFwgSDRzY
+rnSBZ213jiMToGkfxcD+ep2LoIukHCGhtIxZ8KcLjvCj9sBGecCvE7EIgbE8Vxkvgx//K6Nv6C75DUf6IxWO0IxcSDEh257w8uKL/5zuygGGuZ6B6FbzaUgj
+jtuVEOq119q3Pug4KVdG5YBbd53VmjNOP6u5++57AlEfTW3NohhYQhx3yViD8AhxA4z7ZpafWwZw9dW332Bw1e/qHHjck2heIVX7XNpoT7scHoFffTUGlXF4
++5133xeb854JmDFO+0aHAQOWbZ4Y99dEptUAwtraKm1lvGCiEjEQiQIAqwQOeSG84OuEsQRHXvflJ4RWtFvm88gjY5tHH3kgX4VJyNNtCPdN8rtPn8WD2CYA
+23kuG/rAoK5evRZq/vHcuLT4W225Vbq7NafmubaLef2vCwwzw6U9qyAefvjh1P7myaR+/fon7VkqlgdM4NFfFgCOXOrjoXguVP3OGxNz/87AgYNyr5G6tOsC
+B2tgOuPLX/5yjkNZ8FkJUcH56KOPhgs4YJoS/Fd4B2tHO30TNnUJiJmPq347KAVfYX7wgE3bhEQ/8CkBLGHTH0Il4W95i6bwrS6X3wTTc33t+npUplKamaYl
+pRolDLSgZ9UoVwmT6JRKhBJ9a0hlnnmFiP/cAnVtGOHWm0ffEmB1ySUqfF9J/TprsKldv9UhaidZPCkVAgHs8t9VvzNTfGjbPcmErVXPhRD3Lrvy5niP6rMh
+BL1i/86Q2IW7W7PVFpuGNn0l3IOxAUO8zS/6I91++x9S2L2zp+BVv2T9n60J6mY5uUz1rGAt+GaGUfmCU3+FmhETwRwh1X3BZXOALqiDYeGfBcWgxqVtWLyd
+gPYqSsn4r93Q10b1bDyUaFapYPIbPJ2Xe5UKLvSkMMzlyNu62XYAtKeYsjzcb98UJ4VbPFN1gx1zermy5DAVrvi4cY/nJK97+i+/dv323fnbs0oFm3zGxrbD
+m4S/509/S0EsPvUN3gp5+29zXylCAsBdgxMGAe59F5/4Xe36je/V4R6eVh6PeoZ/XWRFfV3WG7LhVA0BEhIJgAcIQcJdInbuISbfmCuoc8zppEnPpYb2W3xe
+4xpr87cDsmcnvdD87tYbQ8v9fdpY6YnEkTalAp4rZaJQKNsAVFLPB0ngkVdnhw0bli6jMZSEqDQIhLkgRdK+BY9cif32OzTmXjaOgb0Jum6x5fnqsKR3Z4hY
+eFjd8vt2CRETKBOGkvZdlapP9b/zWz5EoEBOPfXUZlBE2ISf99//wFhr97cMi6TrlvC126TVp12Eg2N0QVDW1DkNq6++SmxV+XFG2lg58zMFj3KzSp7P6tn4
+aWF848TFFlu82WrrHfJglmIeZ9GZAEVnPAO3hAmeUylM+z1gQCyAvfqK5rqYK9s+1sUNHz48x7ngm5ME7/rPyzDpbeL4rrvuDvf/Y7HNZu/E4z9j7I2P9Qle
+4KfGOGVZ4A3t9ZkiIHS+8YyLNZMEQPTHpW/6rX11VxvyKVP3uq244srDiyAa14gGVKKwTpBcALTLZtqDBUlq+ZbKq1CjgAV43ePPP/b4xGbgskvn3I+om1Cs
+QaO21C/5JozKG9AbmBYCM8NsPtSlTR1l5cwzcfEQFyKYbstyTAJaOkN4zPaL/HAnl44Q+WmnnZwrMG64/jfBmGuE67ZdhtgpEjDpo6QtzO9ev2lLR9zrvOT1
+f+ZU9+GT7w1WuBDdczbDccceHcuRls032AkoYNaiCyWgTsKkry7PR998d2zT/lkwfe+0pAbxFZ1Dw86k/YJBXfVbHnSWnzLlbgub77nnHoG7l5tfXHh1s8pK
+gzKI8ny8/wgDwmtrmWYcCYwZ1TF/WNgH/jo2FOdOcVrQwbm6wLyVXc5ctw+awKc+NGAlWUOTtIMHLxc0WC5PmnrymVeb/su0x2Mbz+MBZcr6+I2X4K9w5ltd
+eBUvw4VyosxB4enKlhzAcVke9XAP8SlrjIbq6hKrovNlzDpGgBBYZg81DBiA1OALY5aQ8deNF+TVYYjVYBEaYTABX/p3f3ig+fNdN8bAfHCEZgdExO2rub9E
+u9rrRJjyynUSWb7ZJfWAlWWj9TCTsYIoH2tqYlVYWCCAX83yiMDdeuuteZaBgfjQoVtnn++66/asB3PqO+uEuRJpgXTtSNWm+yJ7hHfLCM9bPT0z/JX38ccf
+T1xrWzI+MPfk1KLlYpv9xpsMjbvhhkXI21kMCIm4LJjxEPzzGPr379fcd//DMRm7d0wYH5KaWqgbs9oQWe1F4YTFf/SVSuFVP+DMNIN1guqneOBq/PhxMQn6
+UAj7Js1ndvpsKk8TtOrCULSyqRP5JczoEE2n4zok89JLL0x+wEtHxSpze4qkmXGTN9/nA34xfAmjsS/6Pv74uPAOLshDaBTf4GObhoC0S57eeCM23wXNWxhn
+DAvk0z4hKqulfjyHh93Hg5KgWR3UX4qxcAaXfut7dwUhpZL/HqpIY779JxR+139IJFTuA0Lj8vsvQSiCsFr8+FVW6Nd89WtfD815Ti4m5crQ6CYCMV0lCFdO
+KsDr2ey+q4MEpeZ7RAbfL4kYjYlIovkmjMJVcXD7lltt0wz/wQ9j9+mZGcIHYxFPfYIhNVbw35isfHPLW0wswodUmr/gM84aHm5OJaF97sTaa6/TLLzYwLTM
+5pCUc/QwQWbtu8bRxHBDKQiHOx1nwjMvNsPibAbKghAJ5tDeUuEPfRDdhamM+wibvhpzuG9LheCP+sFN21oRMWHCU6F42jkp8HuGpvjAm/nkX3BBVxvBozQf
+ePDvee7gBRecm0qg5tDwiKSOgi1vzOZDfjDiOdFSwQn8wzJZyDx8+PdSMaPlhRddksd6rbTKWuEFDcgIHv4zZaAefQC79gkAfKAj/JIDFx7WnnwSuD3X14Ib
+LISQcDMm2TMIxcAaMc8DKbQgraWA537XPb8JkcaEEpVDrGqkAFBOgML5AgsttEAifocdd2kuveSCnIMRUBg8eHBG7GgazOBsAa4XQZxThBc9OsuBC3yS+xDl
+u2Dk4pmUZI2vvuryZtE+KzQvvTA2uDDmc6aODDh6xNjj9Jy7MDdjDkP4fuDAgTkHxLUB91lnnZWRLtsURPMEEkT+OtuFL+veCI7wdCUWyvwTN3OroRs0V115
+WT2K7zhkPuaPYpPC9HsrrLROnn1x/ai7m1NPOCJcpT65Pk0GgkRQK+lrS6e3MxrHUgsGoLF2jfGsIhA1RVOp8MfVxiiLL96OHQjQ88/HOrVgPlbHyoXnnv9n
+83gc2bz9J7cIXukVFvKhZq1YCXHeuT/L/gwcOCgW8X6nueiii5rxMfaSik/yzwf4qPzgssKE5bAuzxgVf1q7xz3fa689c3xtvvGkk09rfnP9H5pPbL62jcXJ
+u/qNF+zr6tZtxsvqCImkr+ipPbxLgHzDAxxK8rpPCNVFiMAVbz1prYwHbihE0jBgFVQZSyUPYSqhI+UaxfS0oAYy5BllaVhzVFLN2Sy91BJBhDebldYc2oy4
+6PRYsvLpJB6tSPtZ/oPQ6sPwJQBZyQf8KCbA3MKyGKbugbXqpAwserVVgGbjKo0ceWP2T/62jHHD67m5jRsmFMzSmU9yuk0NTo29bG+Xhg4dmgEEYVxEN1OP
+OAhhoailQFZ5SGUpjJFYCHNnn/nMjrH+bPtmlZXb8wiMmwKYwHNrDWjFe+75Y/PLCy9p3pz8dJ7yqq8G4Cw8+PS56taOVQOioRiJ+0nQjRXBx7ISaszXWQ7+
+uan4oV+/ZdIysYItzdXaJv267/4Hmv/9xreaB+//Yx5sc9aZP0k8itQddNDXp3sHpViq7Jx+o1+5/PiQ8jW3aCkT/LtHgZlOuGLERrEB8/jmO0d+u4njFJJP
+8QNlMTksObiLl9Eaj0utpbGEyCGgMxa/4m04hQ9WrCLN8uOpLltsuc1UlcsIUJlVjJn9V0iFGE/CFPJq2D1nFjCFmS/K1jFFKi9G85ugKUOT6cSoUddH57+Y
+g0XzAgiLCUTIPG8Z+T8H6wnE+3yUANp+LahRS4j0CwzGAgSBG8T62cekT7NLYNdvCsJYSvjVJLFkA5x6K0pI6bBMNKbxkhA5HHOXRJ1MwIJH0lfJxLS1b1xI
+9zDMeyXhZAtnDznksObEE4/LoAAhAZNoJ7jUrx5jREJPwYHJvBfFAReCEvDNglmOA+cu9GKhMZ55G17D7NL48U80N4Zr5XRbvEAoKUiMjSYiuvgBT81Lgtvi
+1aKJ+vRXIIvFt3qc+0qxHH74kc2vLr2q6buUl17He3ljXsnYvlw1tMTHLvdKUMvyOMjUCg4CU+1Sula+L7pouyY1cVYROlaGaQOc3wqpDBL8VhHEkmL35IMU
+BCFIObYKwinr8kwez9XbCmac5RZIti7sU5/aOQa25zW77LpHEg/yuVjzIkQQWoRiEbguBMYgmsWzxX14jE30y5IUa+IIkf5Ahu+Zr2J4OKClWARuBJNeCeMp
+B5c0PotuG4UQuUldC3H59Nw/QiSvMtVX9YjaGS8SNvibGQ7/4VPKU3nie6OPb5gMZKcrqwKHykrq1k/CySKByUJaAZNzzz03J4Pds1Kj1rSVEClH82JKygFt
+JTDIM/Plvgn3/b68b7pvhEi/CZHEHSbgRRvl5zRpQ7JipU6OQhP0gRf9ZQGtGH/44YcziESx7LvvPs2E8Q9lf7TbMxb9ojnFgpZo9uqrkxPv5U3hV0m/ubMC
+FhK8WNKlHzwENCRo3P+ugClGEJZESExPgwKyxhLuAVZ+gPhtQg7hlHG/XAMNkWz3CB+A5G+XZbRHxwqhbv/JT4dQvRbCFm+zC2EsJpgbRGdP40PbkK4uS1xY
+H/NSxgDcRsyKcYxTtCOv/ivnu/NyD9zSxRdfktZEnwhoEdYzIXB1cpWUgQ99hgMCwi3k0xPqSvKxBvAsWe0hmIEpOmGY+be8ZUEHxthGgENfVw5XEAMRjkpg
+YC0JA0ZmnVjgQRG6BxNLw2pINLK2wEDo4abW442JYIzkHrg7L/eVkygRdXITubSlhHgb1h9K8Kb8nKYqY5Pk+Bhr2RTIChdPVt36DJfG2SKQAiWfiCOXjdNz
++DKNNwgQwdBv43xLhcqzwrNWibM8+N9zdHohhKiU2cTYksO9Wyh2TXMVu8qosrIqMgIacXxDBmAhQGUk0iw3wXL5T4JpaD60e8qyXPJbOQFg9auD8NEEIiUY
+7eGHJ4UgvZzLPixNkRBmXoQpK4kPDGPcYR2fQIE29QkcLqmYIP90fHhexBsx4sqYw9g93Lp2u4g6C1ZF1CsUa+en+S/P4FW/LSkS0DAHos4SFJvszCFVG/CF
+Di5p1v1vGVDezTbfOpWUzYkUII/BNgnMXAmeBTHMqVkpr00WCoOwFoRK0ia6YUyMOmzYsOlwEX7uqPReuPKM9TI2YQVsHiwh8syY0HpEadb9ykfv+1G8CK9c
+N0cQEE77lrjwFAj4PJcIDYGAg6Gbb5KvkTGEsJigpUP7DR7CZ2EB4SrFKVoKp3BNeeLv5YO21ge6Z9zIA0Bj1rs77QkIN1gUAJA0ANB8nYKmQv/5mACALELj
+t44yd2+8McMsIpB61SWfRKC0A/gpAWzz7sQc1GJ2LooBPRNddWahufzQzmOPPTYdhhKO92MITdl/5Uhj+c8557ywJPs2q62xfjKb58qDvxI/2wp1cySelavE
+pREF5QpKnrloaAEGsJV1gSc4xNDvleJxJnmWX76dpzL4RxcJE2AGCW3QiiCZA0NsS38kAQ/CJOmj+mrsR+hrDKc+4xrPJPDJrw9S/a4gijopFXRX1uWZqFqF
+wLPgXH5Uu6KMvAwbQAmpEDhlzspSnnCJFhSbocUXvzgswuKXB9ytRwVOeFtwwZZ38T9lpH/oZVxIYCh6xgB+9XXy5FfS1QO+MngUX8N5V4wNkaURMZ9CtkgU
+4BCDKASCFPrvmXsawQyEz4BSWURUp/y15EgZ2tgzAMn/cnRSQmSdsxyltlTo1LwmSDGnBA4JzLOrF3yEiG989NHHhhB9Jcdzf33gnrA6KyeibTIkCJI+wQPt
+6Fv0jKWRuHyYicDAG+ttnERjixYKLCgvGUdhdPicnRLRhz59FkvtD1/3R9QM8c0F2T7embhn1h6ylARKhEsCk0Qh2CBJGRhXYiAJDBIcVtDIfzhE485E0AgK
+IZLQXr+5rlzaLSPgUmsWZ4f/znpn/l1l0RPPYHRzSuaP0ESwyp6mmvS1/4pSWGaZvrFZ8KCI5F0c/VwiriXT5VOPOvEo/nOhv37ACwHx3G+XCK4kYCGvzZUE
+teWZ6HBZHsRGWEi0atY3SYc4yFEZRCpYAiO/uSJmUMPyKNcyWEwahkSXdsIkJNi2CzsLWbCFFhmQwsP1OPDAA7IT6i6kzYzMOfnP36+I08zEn1U9+gVWQn7E
+t49sjjr+Z83ue+yZb4SQn1BiWC4UIuqn/JJghvYwZSWaUxluDQ2KSZVXP8bWXmk7LhfXSFLvrBKc1LNBgwY2v7n2uhw077zzTlmfMRqmlwedfHM5CTRG5qax
+kubvWEkwc1Ptq8J0olymIGh0cEkYVJAGw5x99jl5r4TJt2SSlBIxHiQ8LK5BP4VinCLc3imMWWgeP+z30iZcnnfe+c0jsS7QHJqxGWUi3M8a4jd0+WRu8+/S
+jLj8t/nqmN9cc2Uspn08xrPtShEGBc+iiXrxrf/43pkTcMl788wcFIPjjJN4EP9jqznpIwBcOszBZBWBIU8BQgLgQm6UTqsUdWTFynbv3r6pG340OjVmwV57
+rT0jDzBSmUPAWO388suTm6232jA6O3+ug+PSfZiJlfAyK8tvKAtwvZeAlhBNCu165JHfb24Zc0ezzRbr5bgIskMXh2VrT50lHJL6iqnUj2kszWFZDHgN7lkM
+BO3fv38+R1j4bBVQG+ZXx9ChQ9PFzYrf56P6INDwta9+pdkn3BZCYDVFheMLruqTMLfoHMuB2SUMPz4G7WghUIGG4CI4ZcEtWrWolmvWRkD3jZJTY0nTvtP7
+DZ/oSxjNhbF+eAbuKWGKhhIxPpPeC//5cDYflCFcsaZwCscjI4C0zz6tFd7vK/s35rBYfHnRiZttlb45p8mTX8lxoLHuXyKSedNNo+PE1ovifLy102o5GqEM
+gjER2NFpoQXbsDlD496C8X/KlPbcEv+7GQoQFoIkuVlE8LsVgHYwpgOQ0F7t/ncIcz/4KRMmUY50c438RijAyMccq1M+QYc6EyKqTcDk/TBSEQuiaUtMI72X
+VXIfrOPiVNE99/zvmBOa2Kw/ZM0w3+0Z3PrSNEuG8mgPROSidSZ9UwcLxD3ip2NcLoZV2NwmETXjv4Kh8Fn/wSkkP7uEPlKF388+++f539wQ91mqPPqkfi4k
+AbKJTpKPdjVBSogkS5zkcdZFlUffhSIfgdT3DTYcGlbtu/EWjJ9M5xN5Xepj8RxoIhJogpRixvRC7mgvFW3yz1x+UOi8Gn2rLSS777F387OzfponTHElzzzz
+rKy99rxRBKYBTDILOpjvuuTiC2I/3H3NCssPipc9TIy30bdvTS+XD40ID74kPPDp2TvvtAsUDIfQgTHq6oEO66CCJNJ/yPPMAK01e+2bxBO5oX0lMXT/dcgy
+dmMeZWlndREYCZLtngUIf7RlHq+AWbQZH2+rQNhllumXVikLzOOHvrRtNOm6QOKsEgaQD6wPPvjXZvDKH4txx7PR7x4xnnkh+8ZVZbqb5tnUypjP4Rt8ZOWq
+nWKQcePG5fhp4MBwvUK7SywTF8tVB5BU/vqGY7j8oAmd5luwf1iAkxIWruINN7TjH3guYaj6MbkwuFUQ5SFoi3ATYv0YPnx4Nl9lWbitw60z2azM3XeOiZD4
+0Hj9zgGxKuSU/xCmwoVK4E3CR3jiw0jwLVFY66yzdk7ADgihkOzm7jb/siEsD6Qi6x4rQsaHxTX2NC797ne/m3QwKc31c0aiuS6LqC+95KLmE1tt0oyMVfQs
+Db7lfRAgQgvXhAZeWqXSBlPkA5Ooc84jYXJRDAzN/VIBd4ZGpokQxpsZ+IYiduZ9aIQpU96Jb0f99o6XSA2KQzv6xPN2zKQj3AX+NqSqm2XyTYrVu1SMGe6+
+63fR4SdydbEojMEt4IqYiaV5+Hivuqp+zzH6Gmus3qy31vIxnuibg0qIBDt8YJBVVx/SXHX1rxOptG6tmChGrfoQSQRyn332ybCssYgJYRbJs1IuylUZ3TO2
+ImizS9Ue4m728TWbXosNziVDm222aczf3J6nAnXWIT/iw7mxEReH/4/JCI5xkgCJeSZ0B1MnzpRR7oocAAArvklEQVRFf1Mekn1Iu31uz1gVcki4zSdn3fLD
+ke9KykhC0AIr6C519jlvzOFHCasIHQFfNN5v22epFXKO553X/9FMem5SwqFfPAN9FSq3wp4LTKCMnyhDJxdR+jyyE084Lk4k6hf0bjfvkQeuKfyZFtCuvATK
+ZRcufOFtctC1zTQ1fUSFypSZPJXRPRlZJ89K0xAEzFbWiwA4jRJQAMCA5pAQvEV0K82Yk6/qea4ja5aKybPfBAP3T2KafZfmBeHKglsyIcu16UyQ4rmLFhci
+3mbbHVIhlF9Pw7GkNLHx32K9eyWy+dciRZb/6Is6OpmIMJhLsarBQk2EY5m4mfK5MKfvcnm5dDQld1Aq2Dthnvk35aefhP/n55yfNBK9MtiG2846iqmtfnC2
+nvGL5VFgNQfjf8GjTjRW3oDexGb//v3Ckk0MEBZOoTKWsFL+sMO+GRPdx2U/tKGOStW+sSG3zhhOmhe6Vt2+Z7QX5yxMag8MtbB3nQg4sO4UgyAL5jd8QQPC
+TLmbpKYIudwmdo1rKZofn3p8c8/dtwe/thPxlADe9YyBADvawa/VOerVZ7B02XzoVlNFzwyKCYqZckKjUM+e7Wn7Ag2ExrjGoRdLLBH74qcJVqAmgdOABHCn
+g77xpiXmr+d/9eZ529P8WgwEuNZcvtuMuun6tETmBL70pX1j8vCOrAsMRZAiQP3PDB0f9dy3zjHJxkYiRxBbptnz0pyXXz4iX/TrRc0UweSYoSY8EKOMOigK
+i26vGHFJ84vY9/KFvffKVk1o2s1brloxUdUtk3v1v/Cj7s5E8SAYi0SgO8t05qvf4IcDxPzyl78ah3e+2Nx6x4PN5bEIeIcdPplzKYIeJlYJRLVXcKiH8tRP
+ilKSr37njfgAL4Vhzsm474ADDw7Ld2/whBeBtRs/F4vV6jfe+NsYW30nJne/m0qnE/76TZnZi+SgF3BUH6qtOfmustxUK1eMT4S8d/zUTs1vrrkq+PfZcFcf
+yDEf+jkKDW9z5QSzRHEFg4TlKTeK0sS0cZzx4e57fCECGU8FfzslqN2sivfxPdh5FBSo3wSzR4/WS+tK83pYx7giEsZvEd8SDdLlw2y5rmhahdw3HVNeeJNw
+IIhwYR3VRGgwIxNMi0rKuBALsqUxt96Wqw8WWmjB6TPpYPHc5Xfn/6rDM7/rObghkNknRKJFhKjyeS7/z39+bgrRNttsn7PaloGUEEGUPmFCixMnTHi6Oexb
+RzR7xNvTK4mEmTuh3dSt3qqbUBiX+V9JXS44pJ3Hh2tlTocQWf1AiMDVWabKdn7rp0SRsRRedym6eNVV1+R9S6EwGEWAFgWXh5buaN841TPtSfVbJAxcQuCE
+iPtEiO644w/NT844Lcp5o8eMOZy3AmfexH7MMUcFM/5gOt+U0ihYBTSMKblZEnxVAoP/ddX9+q77vvGSOgUM7PdiUR9+ZOy0rMEfPZdNHKO/+TNuLAttUa7o
+o+CP9g488MAUqlIy5p9YJumLw/Zu7rj9lmyLwtCXt8MjM35lWYuO+AMe4SN5ZdmPDhqugy4SpiDGx1TGQwAHmG/WxgywgrSasZJn/ivrQkCMqEGRDs8Q89Vp
+wuW++nWImU1ado3BXY9u4XZslwJpScsmMeHJJdOui3BzuwhF3ev81rZ2WTXu4dChQ3PVs2iWtlzatkTklJNPjXVxB+REqxORaB7MZKCpHUhz2R9knLhAKIAT
+jz82cVLumG8D2XHjxmXol+A6EcmCUCs0hIMLN+pk6e3E5ZubZyGA2lhppZVSM2I2zDI7QUJsfZGPxf3lL64IF2aJONnptXh9ys5ZhxNhhXzVD8djx47NBbq0
+uPsYkCanjfWDK2pbuYW2xnKsJNfVIJ2CPOHEU4J5Xw7L3G7eU4YVU/8///lis3acXXf++efkeRebbrJx1tvZFzwgYobu5tQkzyX96KQj/it6zfxcXuV22+1z
+uTRI4Oeoo44NX9g59fM3Yx+6O15APSzm1pbKCCVlB0Z0N1/GIuET80qEwrpL7jQDYDsJfGyyyabxYvBbkx9sUgS7dXfOrnfEGFgpMXD6DQ/61Z1AuKGxuhSG
+rNJUhEMHCBKB859LB0gRCx3HLJhcWb8xkcZoD7/VVy4jhCgDcGXejmeeS7QiRrg0lqr8IAbD3sytXeMNzGfhKe2tnMTFsTkN4WlbDGveiIbhw0rgAAOmsuv1
+5JN+Eq+e/0wcy/WPFI6uXd9KmPWX5a269eOW0SPjGOF7czJVPWCXCDUfW7uECSEIlm+W5vkYS10+4vLc8yRczr0AqzGjMDR8wjfGpSm5iP7DC3q8X6o8NGIc
+ENBMfPqZmIxcI3FvYtsKB5OoYBNIMR4wZjPpS4i1R+kYjMMJnPpPoEwawxX3R1TLWMPA+onxTjcdGDC2tCxlW0y10867NSee/MvE9VE//EEyVzEbnHGFCbB1
+iIIxnelPf/pTMjfrgX86E9wSQgIIj1bsb7HF0AySjBp1c4Tbz4kXCeyQ7Sp3XyiX3T//ucyrP8ZJNWanFOwwEB43XsT78GHoYi2kNyEKlx94wFdyk+AWW2yd
+Lh1PyuSr8/n0F5+QAUIk2IZe3T3A0P5gWESCXJmNnTTCOnmGESSMJp/juFJig8EQAnJL6Ei+8qQdQ9IAGoZc7fmd9QRwk55+LOYzVsq6aW2CxC3TScxZE3qQ
+iAlE92oLOcbFCAbPtCrhdhEo4VsMCgaTkYcedkRo8HNjEecOaWmKCUrwaWlMRCD79l06tsVfHIsxR+QAthgf/JL+YhDMj8guTMhFdn+p0IomA+HJ5R5lYfVC
+waYeZWouD47knZ0gKScpK7351jvBAJvk71GhVA6O00vBgzHASVi1q2+UkGiVMDhGQldehqgWutekrgAEZQTfG21k5fbJyRMvv2y92Rvxf2owa7uO8l//olxf
+bz61w8ebU864OI///dGPjk6+QWd9x0/GJMYjlAoLgXcI1vhwJ8HL/RKmF8qHZxbeIlwTrCKOA2NKgZKyhk665NLLYg3kutkv/LbGmuvHa25GpQsuakeJGAvx
+UrjP+Ep/999//2ZMrGhngeFCWwRO/azVtttuk/UHJYKX2vPtwaovVvzMN6Vd/obXBeUEJ1KQEA5zQ3S5SCrn2vHh3cMECkLIWxGdo5nc99+FCST3aF2Mx2Uq
+BlQeINFUEhfDeDZ1amvi11ln7bwvigahEmRDPiT6jWmZY24UZlQfAqgbUkTICJF6EctAGZFWXXXV5usHHRKW5cFGYEEEitCDVT8hlxlnbdXp9zXX3RLW68jQ
+YDsnLDMzN4FhkSRl4IDmNPCtxI0aMeKqZoUVVwgNP6T5WGh5DCofhoUDuIJj9RUOq/zsvlmkjy63RvPaG2/HWX0bpiBcEqsLfhoKpRKBoQD1V9JfeDTrXys0
+3Le8hmXgaoIL/sAjcude9DLw2u79IRjcen2maH3Du3Hmrp/erPntDbfEoS3fbo4LYdI/iTVhDQQdKEj04wJ7zhVXJxeURYU3990j5NzhSvjg/PPPi9D7SXmY
+5jnn/LpZpu+SAccrAfOSYYV/G8I6Li2OSWZCjyaE0xhRG8Z/BNZmSm2wzISW0j3//PNTUI8/4aTmjJ+cG9M6/ZIvil/h791u7biccMElBdS9ZbzWzWoDBG8m
+YQGw0ELt6lhjh1dead0fHRL6XnTRBZOBlSFUNJTLb+Fy8w4LL+x42PbtBQDGjL7d8xuB+/fvp8pg9hg4huY0H4JBi+DWiEEEgAmI8jMzgfI0jzcrmL+pfNwT
+roGx1vjxTzWrrrx8amKTzISP4igF0qNHrKmKqJ1+v/zy5GazjdeNicf/STirPu2UQBHQCquDScIA5ookQnr55VfGLP9pTY84+PHtfzk7bUJaBAxr/Rni6BeX
+gxBL7n3Q5PX0Tzz+TDNkg5WDUZZPQTDAJpiFP+Ft7gqiV3u+qx35CAIPAKNVgheRLP0U+frGIYeGS3xCTBN8Mt0d5QmafsJj0fell16OMdMqzZVXj0w6/PD/
+hoflXCIFSX75yptgLassemB2V2fi2gsYsaSScY55Oe2ut966gbBJefY4RUEhNs0z4aHcFAGX/bJuXg+vhVtpPst/3okonkW81hdSzHixPBeBkU9uv11z2KHf
+blZdZblsF93Jinz6gCfgwH/fsfr79bQ0OkKyCIKlO2+84dXprbWgaWSGXN+uYkQdqP9cIo04QDBYLvMkFPEhj8Z7BNK5VKJhiy++WHPltXc0X95v/2Qm+15o
+LUleiem1grczeVYXOCT+NW1CSBFHovWZdQjo32+pZuxjjyeyMS94fOsTS4uR558/gh6x2/Ev997ZHH/c0dMZUp8kZUqQEAVRJfX4b6yGISVKgRB5WfBuO20Z
+d14Pgj2ZjFJLlqrednrhI1nug3wUDKYZFuq9cDNk3bWyX6Jt9Q6mqtt4idBKhVPlPa/LM67NY489Nj2Pe1xdsEnf+N+D4h23G8R4I9yZwFkbEnameht9pYDg
+Hk0FINZfb9VcsnPaaadnedvAW8vWrpgHiy3wdQ9M7hFsdRZdRQ15IZUoHYqVpRw4aGDexk9oh5d3/NQuzSmnnZkKkTurTyXsmJ71YfVYI5aQUqZsucnGVDwb
+7iQvZu+994jyTyRuKRt8QvjxSgkQ/mc1k0M0REp1gL/uoXERawM5OgdRvmk2zOf+/JGPufebtmGOPXe/hE4nJXlIvAGbt6v9M849u/Y3VzUnHHVwvBnv2Gwb
+I5a2AriOCefSqJ2pGMA3JpZoMm0IVlTSL+0jyiuhHHqFxppvvjgaOARNH8s0QyLYabTbbhsdk3fXhgZro2hVvzqLgf1WN/gkGg9DEGT1SmPG3BarDgblYfci
+f1yjsWMfy8Wr9vwQcAmDmWcZGP651NlG3nifD23FuoVmyaWWTCZkvWleST1OJoUTOH00GGpWddc99ON+oXHdI/C2X0sE7cenndjc/vtbmt6xkqV79/aQSrhE
+a/QiCCwSfFO+ywxYpRk46KNZ3pQA5pSUwbzczoK3aArf2i+8i6pxBbm+kmeWNI0PS7la1Pfzc84NT+SSqHO+fPUMfnz4oXtjfHRHWLGNA79/T7pqTx/ABk58
+rm00JKzm3QgYF7c2Mh500AHN2Efuj/LtYTLKgRP8+gsWcuE7XsbcTjzS2hJm1xihcs9vhZhE//3u1QtDztd4J+u777aWxpnfGkEIkiv5TYKVVU5+luiaa64I
+KV48GOjuCBwcFG22YypSzqpJQrDMucgRzVidyIcdHzrhGXj4uFYISOAeNmxYjEk2yHmim0ZeG+20Lwbg+hAil7KQgQAjLr843hh+XE5sqkPdMyd4kcw9OIVI
+IIR2MwiuPUoTJz7dnBCRwXXXGBzvK3o5iPZys96QdeNk10sSR5aoIKrgCOay5KhggcMPmmjh7be1ca9dNcAVU6dzDQQazNgbh9De++9/UGpRdVcf/K4+0qoU
+WS2ctRJAPRhMUobbfeJJJzfX/Pr2tDwEGX3LRfMN7xQSa/n0hIdijLFc0pTi0edKQus25KEbGhQc9bz+E3DjooLLc/yEZtKX9vlirs7o1WuBEKiL4/7rcUjk
+Js2PT/9p4LpbRPd2SpfVlApvgZvoG04EMQi48aCxtvk3sJjQdnjO6quvFng7IM7pezjga3FQMoC3XfDCQsVW8/YoLZoLgBiqtIEMMpNAHXPJx/0jGBBXwmbB
+qrySZxpgDSBK+JEr8GJoq6uvurU562dnR0TsvBiAr5/5lNOmLcTGNb4xu4gRl00qxOafmT6KMYxPRMpsnRDiFLpdffU1kpms3sak6nnxxfaNClxZ4z3If2Ts
+483nd9+7OeB/9s/aZ0VcD9ShPeMjbufwCNEbh5ivUkYaM2ZMM+GJOHAj5qHgixW0hu/CCy8LAp2d7pdBLxeFRqQw1DknQqQduF1xheUbRyzfEdrU6u2BYdlE
+tbi16BPVxkubv9fcdON1wYwPKzYdTr/hA9yEgiam7blE8CjIg4aeF473+eKwcBW5eG0ov+6rB/yUIXq/GpE8qe7Zyl3WWr3GsxSf9F60LXwOHTp0+hZ6giqs
+b4WChHe42Gf/7P81P/1/ZzY3j3IW4Xwh7Ffkhj+BBThm9QkyHjHm4+WwtqZn4EkAyPRKeT+ESj8I3cQJjyQe9NU9NAWbC37ITZdNNt1yKiZmRWhFiRaAEIkm
+8buEi+AAlAWKQ/Hyvufuq5glUQ/kyGeNkjTmlpsiYrZLM/z730nz6R4kQHQnIvm0td28kKUDnXmUrVRCqIO2iFuKxDpYIUyTMfGf2+PLzfrrrpbv8ZEfIriu
+yvTo4R2i3Ztbx9wUrtejoZkGJ1zV32pn5u+ZYUJgCseYYs+9vjgdZ3DimUSZ3DjyugjDfjvcib3CQrY7g4X+4Wpu0umn/yTmb86MMeASzVE//F66M+oB31/+
+cl8oo+/nZO1j4yY2hx/6P2EFDs5mZoa/2sZYXDy4xyCd+QrXf/7zvWFhdwiBGjI9QIOhJEqUAl44vJbRo2+MKN3RAcO3c+U5WgskCYFjbsIOP+7PKtUzPDUo
+xjbnnHNOhrTxpwhtpYLL/3vu+WNz+BHfifPQfxfzS1vnYZsTJz6V/bG1Y2gIJV4V8WRxnTXBxdNPgk2QjJH69l06ghlDwqvZt3nmWYfq9wjPol4y3XpAKUDB
+5wJrXT6+8dAUpLrJLBsv6URpEBKLGTAgADSK0SCPr0lwCBzGLOvlWffu3Zq/PTS2eWLcQ+G+/CKsyy7ZCeWlmYWjk2iev1c+z6RCICVw3HEnRlt/b755yMHN
+wNDK4Ln55tHNId/6YQjRygFbG7oHn3L64x2giwTBf//7Mam9TGIW8doW3v8TfAWzbwxhHsfWA0EGS0zAUYoK/vr0WTz8/VeaiZNeal587pFYrnJQTJS24f73
+b+3fn1a7F154UbP3sG81n91l83wh2Gd22i2UyIAYJI+LMeh1wfDrxUBaVLCdRL/qykvf01WuOqulWeGi8rCsImPbxpzc008/G3SNt6wHg8KtfsKFNYqjb/tL
+M+amyyICOCQtuLpZ4BK8qq/anPm7nhuHWqXO1eSloGM9U8YQI3g6eYoyu/jiS0N4vxKrM86OUPe+uTmRJbTaAU2Mmcw9SkcffUxY8yPyNyXSJWD31slzzzu/
+OfaEs5oN1l0xFF1rma1+dwwDmuovb0aUusu66204lcDoOK0oA4EgWC5zRiZWAQ2xJSzM4kvx/hshY4JUbiBB5LvzYVmhL/z3Ps23Djtk+kCzmD+hnsWHNgpB
+76WpFKt6vHn7sG8d3px7zq/iZJ2NmttuvcXTrHmJvis2Q9ZZJdd5ERzjP51nQf1fIiJAI0ff3Rx/9GGJdIWq7azgA35UGQz0jUMOa24YOTpcrkEBY3uwpuct
+g9kx7EyA3snMF//qlznhu+uuu8yRAAOrmPz6GBNuH1G5XXb9fMDergx5ZfJr0f5yqcgM+jEOl/3mUTfEMqXbcqxT5WfuovsSJTezonO/+ipCtt9Xvtb8/va7
+m0EDB6RHoywcsPYuzK5txxof8a3/DZfq41nnnyIoYm3gkd/+VrjI8DRjOKGNmVO12Xl/Vvc876zrzjvvinHQhtHW1eGubp+wd1p+q+VFBM1TWTp2cMw1Xjri
+5mbNNT7a3P+Xu5rBK6wZeByYARRyoW8srn4yLuSDQsDr3e0lctPs9FtvtZEIA0+ZNEpwjCUUnmGNnBH+z6ignQdQkbwQ171b7O+58754QfCTOfP8mU9/Khsr
+AgHo/dL7CU+VK2RxD7556BHNtdff0uyw4zbR0SmxfGSrUAbds9PBbhntoa3UC04CxDXwcuFnJz3fDNvzU+E3D8uq32VVqLU5TEVUs+JnnH5qs3UshDX2nDKl
+3bYP+RjKpCHEvxBvDKxWKio2h01Oz24xsEQLU2oLLBBvWwxllgPg6DM6UhwCRE2XpWJB6pgUpPfC83vdrwYJFwWhne8ceXizylpbx5sZe+f6Nq4PpVweDJ7g
+sSwWIfrddtu1qmi/u/SJcyTuCyt6QQ708cd7ta1Nz30Xrmcl5CrGX1WX9yeZEzo5zgE/59wLmg0/tl7shH0y+Xn3z382LaOAgwXGX/nqgc3fxz7Z/Ne2GwS+
+3shXfcKdiCu+phj0Cw+hJ+OBl/zmEXXZeJOh+aIxAOo4pNPcCrM4GMIzhMEELA5g+a0Y2mBdOZE3SPv9726JSNw3m/854GsZsdG56pjf85LA4YJwnT/gwG+E
+//ps06/vUrFDt13z1yPCoJhWh0UInQ2BuGCFCGFUUUBWk9t5803XxlxC+1rH2Qn57GA/5tgfhZY9Ig++ZO4hGLxwCUcSHCEQmLzMTNBBxGhOcVT5Hww/f8NN
+dmy23mJItBf7kATEw/poW57CGcJLzzz7fHPbmBtyoO3ZezFkZn6fj2r/yiuvanbd4+Bmh22HJANy555//oVkMnSCd20Yi8KDflNsr776SriEzzWvvzmlGTXy
+6lwiVgryfZqdo0ed9d0cgaFP5Hzk/M06667Z3Pvnu3Kz5rID+kaw5tZYJD0khMPb0dshQDu2bwUYvfSFbBAceKu6/YbbFCSCIyNhIWU6DlEWk9LcGFNBmpx5
+w5D+Cy9ikPnn9yqPsbEO6c3m7DNPyvAxpKlDXXNLrE6sAVhSl0Wk23/yc/EWuaVTWKwZA3/rx4YLGhqTUCzUa9r5edEPCazgtxXEoF8oVASnmCIzzeFHleXD
+D91yh3xvLmIUDgm0PPAKJ3A9efIrqRkH9O8Xe2iuDEvfO4kzJ3iqdp98ckKz6257xQsLvFql3cQGF4iPTpSfeTIKhJt+U+wf+u1vr49AwXbz1O8SQvg8+OBv
+NCOuur5ZafmP5pHUYNMuocIj+AcD4hk0oGSei52s9rXR+Pc+ML558C+jYxXBKvME06xIV3jyTCDlxxGcOeboH0aUdXvMFHOAT0U0b+kUIPRxoQO6+Ta/CHYW
+3zcPDX4LxxQWXstwCcsjo4KplYNpIYgQIQgmAJDKVQSJ7XhjgXj2dnPdtb+ODW+fbf58zy0R+fh0AlMdmBPmmBUi3CuiqYuPv96668b7d/rnRFlLpHZzFW3n
+Mg6CAALl1SPy6APGWjLc1htH/r457/xfpBCpG1LmNd16W2yZf/zBxJG+014ubfuuPmCmxRZbPP53i36s3bpc89C4ycLei8a6tHBnMS9GKKYFhz4/9VS7QNVY
+d/14Edd11/12miJsx75z0zxaqF9bhx76zehPWJ0QEnhn8XkuXMBSIJQ0/nGZu+QqcaFEx7bcbJ3m45t+srn/gQeSFur9sBLawr06RQytTLee8977H4nJ+2ci
+mvfRdLnxM0NB0PUNHsmCy9ymfqbARH2ekwu0lZKHIJ9LwOWQZPpXSFm5cCqRMTNHJRpSKW0zctQ9sYnv9Rhc39j8IF72ZLkFgCtvVjiPH+oDk0STDh26eb6m
+A3zOjdAZK3AriGDvCGadEorAPqJecTZzjokiP7/+iQkRBj78gGbPPXafR8hmuKxwdPHFl8WLpzdPzUbZgKu0G2KCAVzw3C3GkU89+VIQcbnE5bwAgqBLLLFY
+WBz7w15OwSE8FKF2CbF24dBYd7F4idrIm9pzMrSLVnOb1I8+DtC/4PyfRMj5hmCwduoDj8ABOLhJ+s8FAhshUpZQPf/8P9KlEhRaa82NYrX/Xz50YdL3gtXv
+bbfdpvnjXTeHVdo83Otr0mBw68gB2hF+sIKdFdNHF9gpLr8l3+IG3NaukKxjfSKCRSpJnfVwCkEEQmCI7nHffw3SJE5e+b/vHdhce82IZttttp4OKICL8eeW
+QFUOoOqThHlFXnbYcafUBjqIIQVGwIhJdNzbLiBigbBKtKBQPrjVxTdfJBTAwQd/PRnYvQ8DVis0TIraXAYWdbKMmAlBKCt4Nsnsvjk4pxJhQGlu4Ci4Kbb+
+MdHYtYsNk+3GO7gIEBIOCgYOwZLaNsZQr7zqrX33Ztvz+lFw4IHvfOd7zR133R/80s5LWnNJ0N8JV45rKTgFB/gMTricGNM0QYh0M2T9tWNX7lZ5mGUx/rzC
+11lenRQHfJsMtwjXGO/WW0eFNfx78MaMI7fk1bcWd22ovf0d+O7fP3EJn5QoVz6PLCY8GkAATGfXKN9PQYTABD0DITT6yBuuS0AsJ/nud4+MSbW++f/DtEI6
+zy3Qvk6f8ZOfxgLCvTIaBukCIJZ1eEaDgA9smLT3ou3B6Z7xxXWaIqApbxz5x+boo74fJn7JLKP+uU3VX8xx1dXXNKutvl4qIQwCDrgEW63ho31pZficEvDG
++To5ITm37Vc5tMOQk56Pw2dikxn6ga1nz9gwGYoFHfUfXNo2R7L0kmGVYoU0mOBA/rlNmA2uSe6BsVp++UHLpKJVrwl7CtncC4UiVIn5WCj3wVdKDiwmPTfe
+ZK3ctXpnrEQo+s8LfDP3q4QDzHC3006fyYn4Hbb/RKyAGREwcO1mWBt5uMQ8IHgEE0UNr2BGY7jNt1HokIfmGiRbdYtRPcOEQsuE6Jhjjo2JvitzCY68AFJ5
+aSb35jURDNaGdv/hD49uvnnECXlqDUroDCIUExjbgXXJJZdKYpllrrV570Y94Lci2ZKRyy47PXZnrpeMA0Hzkoq4Qt6XXnFT7JsanAKuTiFXcMqjD8lU8f8f
+cZ+VhMvV1mg3As4tDPBdMPTps3iua0NQwqr/2hVUca/wZdkOS77Qgj2bc35+Vq6M1n7VM7ew6B/FR6BPOeW43FUsigvH2gZLegmhjDGe3QXaLL7zDM3hxY6D
+DTfaLPZX7RyLSO9M3Mk7rzDO3DcwqxP/Ws1y0knH5yTuqJv+EIbE8qdeCXO5eDXE4ZoLLFEAaOA5xdm1VywudIN/OGXKu0mIBeZnhdq5AhXeMvqm1Ph2HB5x
+xOHpTgEAIAD6MFMrRG14/RvfOLTxot2ddtg0tYI2Id38EYZkbbil7tEQyaQR/Hj11dgO0j1WhgfzckUfeujR5vvD/y8OxGi3aHwY8Fa/r732uublF8blspra
+ts99of1pX8yCebg4NDTEK7vuOmtktG5eYCnm4qdXwoyIjYFpUrQFh3a9nduqbcIl3XHHnfktz7wmig88G8dK8x//+IxYmnN9MmMxXAmUaZPaaeAe/BB+gQDf
+Xbt2j+9Fmp123jRXrY+JHdOF6+rvvMJa5fVb3XgOjj7/+d1iPeKdzYrLL9dcecWl8czzNnKHpvqC15zlAZ+UpXLo2yVewjSVdqc1uCEemlhbMFylKcG4t8Ua
+tFNOPa3Ze689p2t6HQKEK37Gd2k1BOEmeNZ5r83jmfydqc3X5icotJiVuQcf/M2Y2L232WLzDXMgTUick7d4jOW0zxLJiyloC1qhiOK3unRw8mQv2F0gXwBt
+HAgRylUfCn4w/fvv6CNfRJektlv5E+LhyVyWNWmbbLrFNG06NQWmcMMizD//AsnYxijaxHCTnvtHjCu3aGx6AwtYlZnTVPgSxrfyfKOPbxp4aN9ACD51wwna
+StwTzIDO3N5u3bo0F114fiqjuYWhE+aCx1aKPff67zzhSESRkoYzys74qPBDCYITTBLayAdPGFh9t8R6vdGjb8lzGtyXWr7rZKSWf/LhLD5m0HpGednqvt/a
+kggWeH/1q4tj+/2Xmr79BsdGxdWThmCrPhYsBAqeu7oBscV8KkPTyWF+lx2wTKxsfjCWTnw9hagayoLRoA7pcPsttt5K+H/ea/PQkADtvNp77XP1WgP1+d2/
+0Fx7w63NumuvEh1oTzei4ftEYAFj0Ljgtr4L8sGOUQ1cPUcw990D28/OOiODKeBHuIK3E/7//B39m9Y3zzp/q0O68aZR+e2/tlxSuTLojkn0iyYm2G9FpPHP
+f7y9WWnFFfK+/PCRbWhnDi71SrR59Cz61vYXrrQLDxQOXGBgRKdB4a9Hj24xh3Xb9BXhcwtDJ7wFD4ty/HHHhFVsdxSAB4+ZLwODyz1MCSfgM7dGIVKMYGRZ
+F4qI63b/Fa+F2XrvXDdZbelz/W6/3XnvJI9UZSpn3fe/eBJMxtv77rtPLvrdeKMhsWnz0YCnXUsqH34DvyGPvhibdvdAYZXqkP/CeQ89Mi5CxLvlDDifEONC
+FOZopXHWs+KeFYDytr/zR5bL59M65TlV7x7kcdm+8tWDkmm329phiSyPhZ/tsnXtcweKCEK+mBiRlGdZaXzLbjDR3x8eGxZt4+xsbfjTpvZaYW/7rs8JSfjn
+AfF0mFrYMfe0PscPCxa15cTVK664OqzRVlmGELVIjfBuaH4BBeUJELyKLib+ovyQDTaLflh9YTFwe0Z2iwtQwFl+TmvX7xan6AQ+9RpL+M9ltCN10T7LJh5M
+cOofWAgTWPUPntponlUGcfZgBENWXX1gbstedbVVs19gqLa12rbLZWu1dd5JmlU+d1omLRhb3KLBUqEIV8+JZwN4cLb8046N0IqFBAd4LOvq3bt9GwqYaX8J
+nbbfbr04FWmrnGYRHdTGv6eWZokXD6Y9b/E0c95/Lznzv5IHbay11pox5jux2fHTuwYePxIKa+kcPqgXH4Ibn+HTLltsue1UpniRMMFvB8NKiOCaMOGp5v77
+7ok7sft1sV4R7VksGeCJcY83q8e+FZL5eCzPb6NQsRnvhfFNjwWWad6O17lEvJytjrLqDDesp8M+woS++XT8j9Q9dl5OsYkvTh9aeNlmicVjLdNbsbdkVYst
+vfjYXEzspwkadg/BxjRgorGKEUrbG1zrOE0rj7wIZOKvR4+PNPf88dFmkd4R1Zvf8qEp4bb2DKKFGxuC+chjE4LxLT7s0Tz3QpyYFANx48OXXmnD2Iv3XjCW
+Oi0b7uWLKRCYggA8Hau3N1p/1YQFrBhF2yyitsEE4c4DAG9p2lIEk2Kd36BBy+Z5A8o47ll+jGkuTBluISFk4VoF12OasmhdNsufMN3Eic9E/taVgwd4UV5b
+ymFK4w/Mqy1wglcihP37LRPPrMYIJRr1AYPrRzgwkHWYxlfqRBv01i4tTSHQzKzhfPFcYXhXL2/CGFUf4I1ChCsury0kYLV6GnwisVIxst/6ASfgEYX860OP
+Nbvtsn26osSDYJovNPa0RjJUcsKpDnW/i3lkjPIm58FeSVlrK7vFygvKDM+4VwLsW3557rjjrqQfhV2H7nguP7pqq8tmm38ix0i0PARLKtUJYcrS/pBJcN6I
+OSTIRjgMh6ns4e8VQQllxNRLgyEa4YR0nVww5jlyYWhoGR1wVhizCEBEhmiIeSMYAAzFPJ5zUcxF2JylEwl8IE8bCCy/+2AwntJJ7kx1mNtXE7iiePUbIcs1
+ND+mzxConP9LhxZSD+FQt2SlsN2+8wVzSODELBgXDsGpDv12X10sEg0sakYwwAx/AiLmW+RVBr5fiPLu8dXnC0UADu4GuOEJPNroGQy8aEywmpfStqQe8MAL
+HMGFOhHc/RKgsvT+q/+pEMYll1g8GVobyYGBX4fGcPHVJ4w9YEC/ZFZCjLaUl76ZMnkppyZiB3XwgrMv4MJz/ReIIczGRaUY3CcotDp8gA8tuHhwpS/oLp+6
+4Od3f3iwmfrW681Hwu1781V9fl63Q9EPjIUEMfn7xluxanuZ5rFxk0IbPBtP4tU0vfs0PQOe+YJn3wjBfzXf2/VOGIaFQ4H3bv4V9Hg7FOyT4/+adfUbsHKu
+FiHB+GqZvkskLuHx+efb9uADfA6geXXyq02X2Jab2yhkSimOgjpKGPzXCd8m2WgpndFpCLCOqhgew7z2Wrt7kG/baqH2pEoMKB9iAgDCWBuqwn3I044ykjbk
+ARMN4H+VlVd9rImDzFtGaDU25HOlfFd9OgtWdYNbX8AguY9h9IMLoUxpRvnd/0gwsrPMtFN1goFgUQJvhzYr+NSNCcEttRquPb/Ab33CNGA3YUw4qm3PWzwb
+fLcnvvo/JbZdcFdbwTMvNiDD6zT7m3G+OqvgSF0wgFnfs1z8xrQErpjUfc8xN8HTpv8JU9BXBFef6pk+sFJwBGdWkNRUCNzBhzoIq/us1vzRJ/BTtMY5eMlz
+OGOlKSqCCy7/JXCpp/Bb7buP7nhP2631ace44MFvEqXcNcqjtfxoUHymrp5xD77hATzacb+inS2/B49HHTMsXSwtC9yCCw7wVeEP/2nDf7SG926rrb7mcDc0
+4iFgPERgGTA6pIl4FQC+VQZByvrvG4IQr5OYkMCHVFfVj+i0rTpoV/Vr338XhtNuLUb1G4Ehk2aCBB1Wj7Y9R0B5wOG+OTH9UC94qk7PJcykrRI0z13ul9DO
+F+4G66MOhIFURGExivAVefJcfe5jTHWBwz3PME072dclrIjDQ1r3FTwUlIuVgEMwqadHuEsiqPJ+JFwsFvwfMQaCF220Fs0BMK31Y7nhB559gwHsLT3bvoEF
+zjz3rC448Uy92tamRcuY1LyPe5Ln6IWe6nG/mHORsDivB13V88wz7Qm7FBTlpD7aHEP6TTirHThSl377DacEj6Jp2+2S9+XBN7ajwEsKV+CNd4EnDFHkoaBf
+CqsGl6mUpj3Dr+AmGHDnm8UDr9+sqnqmxDOKcnJYGuxSSkM+sINRPWBjcCigbssNXmE4ZHoIuZLKJAhEBKb7zXDhuG18ZkuIIBTxdVo5k59WP2gIAUFQHcfo
+6vdt8hRAGAjDuiefqBjg6h6YdEA+SPWMVdCuMmDEXHWaTb65LRZOQgjNGD8Crna+RNvylyBrH4zq0T/t05SW2NBS+qM9bdNKtL4VASwDl0Zd704N5WHcEC6D
+PrsH3kJ6Ma9+qM99DKdNdeuHvrpX/cQY8AlOV/U13+QRFhwsU95pXwz8kZ7mhtr+6Y8+lNXEkOBx3zfrVX2XR9/NixBu6/MIIhj11wUeZSkSSd2EtPCGgQpm
+bVGGnvEy4IK7rG/6AvfabsdGcdJPKDX9t1IdHOpmaco6KO+CF3BpiwDAo9/cWZFPv7WtbrB7rm7tqNOYkxDpp2dgkR/c6tdP+cDggmvf8IXO2oYrl7bafrWv
+voQTefWxZ9CMIP9/0Wj1ClQSO/4AAAAASUVORK5CYII=`
diff --git a/integration/llm_test.go b/integration/llm_test.go
new file mode 100644
index 0000000..50249bf
--- /dev/null
+++ b/integration/llm_test.go
@@ -0,0 +1,47 @@
+//go:build integration
+
+package integration
+
+import (
+	"context"
+	"testing"
+	"time"
+
+	"github.com/ollama/ollama/api"
+)
+
+// TODO - this would ideally be in the llm package, but that would require some refactoring of interfaces in the server
+//        package to avoid circular dependencies
+
+var (
+	stream = false
+	req    = [2]api.GenerateRequest{
+		{
+			Model:  smol,
+			Prompt: "why is the ocean blue?",
+			Stream: &stream,
+			Options: map[string]any{
+				"seed":        42,
+				"temperature": 0.0,
+			},
+		}, {
+			Model:  smol,
+			Prompt: "what is the origin of the us thanksgiving holiday?",
+			Stream: &stream,
+			Options: map[string]any{
+				"seed":        42,
+				"temperature": 0.0,
+			},
+		},
+	}
+	resp = [2][]string{
+		{"sunlight", "scattering", "interact"},
+		{"england", "english", "massachusetts", "pilgrims"},
+	}
+)
+
+func TestIntegrationSimple(t *testing.T) {
+	ctx, cancel := context.WithTimeout(context.Background(), time.Second*120)
+	defer cancel()
+	GenerateTestHelper(ctx, t, req[0], resp[0])
+}
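
GenerateTestHelper comes from the integration package's shared test utilities rather than this file. For orientation, a minimal sketch (not part of the patch) of issuing the same kind of non-streaming generate request with the public Go client; it assumes a server is already reachable via the environment defaults, and the model tag below is only a stand-in for the shared `smol` name the tests use.

package main

import (
	"context"
	"fmt"
	"log"
	"time"

	"github.com/ollama/ollama/api"
)

func main() {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		log.Fatal(err)
	}
	ctx, cancel := context.WithTimeout(context.Background(), 120*time.Second)
	defer cancel()

	stream := false
	req := &api.GenerateRequest{
		Model:  "llama3.2:1b", // stand-in for the shared `smol` model used by the tests
		Prompt: "why is the ocean blue?",
		Stream: &stream,
		Options: map[string]any{
			"seed":        42,
			"temperature": 0.0,
		},
	}
	// With Stream=false the callback is invoked once with the complete response.
	if err := client.Generate(ctx, req, func(resp api.GenerateResponse) error {
		fmt.Println(resp.Response)
		return nil
	}); err != nil {
		log.Fatal(err)
	}
}
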
diff --git a/integration/max_queue_test.go b/integration/max_queue_test.go
new file mode 100644
index 0000000..7bb9336
--- /dev/null
+++ b/integration/max_queue_test.go
@@ -0,0 +1,115 @@
+//go:build integration
+
+package integration
+
+import (
+	"context"
+	"errors"
+	"log/slog"
+	"os"
+	"strconv"
+	"strings"
+	"sync"
+	"testing"
+	"time"
+
+	"github.com/stretchr/testify/require"
+
+	"github.com/ollama/ollama/api"
+)
+
+func TestMaxQueue(t *testing.T) {
+	if os.Getenv("OLLAMA_TEST_EXISTING") != "" {
+		t.Skip("Max Queue test requires spawning a local server so we can adjust the queue size")
+		return
+	}
+
+	// Note: This test can be quite slow when running in CPU mode, so keep the threadCount low unless you're on GPU
+	// Also note that by default Darwin can't sustain > ~128 connections without adjusting limits
+	threadCount := 16
+	t.Setenv("OLLAMA_MAX_QUEUE", strconv.Itoa(threadCount))
+
+	req := api.GenerateRequest{
+		Model:  smol,
+		Prompt: "write a long historical fiction story about christopher columbus.  use at least 10 facts from his actual journey",
+		Options: map[string]any{
+			"seed":        42,
+			"temperature": 0.0,
+		},
+	}
+	resp := []string{"explore", "discover", "ocean"}
+
+	// CPU mode takes much longer at the limit with a large queue setting
+	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)
+	defer cancel()
+	client, _, cleanup := InitServerConnection(ctx, t)
+	defer cleanup()
+
+	require.NoError(t, PullIfMissing(ctx, client, req.Model))
+
+	// Context for the worker threads so we can shut them down
+	// embedCtx, embedCancel := context.WithCancel(ctx)
+	embedCtx := ctx
+
+	var genwg sync.WaitGroup
+	genwg.Add(1)
+	go func() {
+		defer genwg.Done()
+		slog.Info("Starting generate request")
+		DoGenerate(ctx, t, client, req, resp, 45*time.Second, 5*time.Second)
+		slog.Info("generate completed")
+	}()
+
+	// Give the generate a chance to get started before we start hammering on embed requests
+	time.Sleep(10 * time.Millisecond)
+
+	threadCount += 10 // Add a few extra to ensure we push the queue past its limit
+	busyCount := 0
+	resetByPeerCount := 0
+	canceledCount := 0
+	successCount := 0
+	counterMu := sync.Mutex{}
+	var embedwg sync.WaitGroup
+	for i := 0; i < threadCount; i++ {
+		embedwg.Add(1)
+		go func(i int) {
+			defer embedwg.Done()
+			slog.Info("embed started", "id", i)
+			embedReq := api.EmbeddingRequest{
+				Model:   req.Model,
+				Prompt:  req.Prompt,
+				Options: req.Options,
+			}
+			// Fresh client for every request, shadowed locally so the goroutines don't race on the shared variable
+			client, _ := GetTestEndpoint()
+
+			resp, genErr := client.Embeddings(embedCtx, &embedReq)
+			counterMu.Lock()
+			defer counterMu.Unlock()
+			switch {
+			case genErr == nil:
+				successCount++
+				require.Greater(t, len(resp.Embedding), 5) // somewhat arbitrary, but sufficient to be reasonable
+			case errors.Is(genErr, context.Canceled):
+				canceledCount++
+			case strings.Contains(genErr.Error(), "busy"):
+				busyCount++
+			case strings.Contains(genErr.Error(), "connection reset by peer"):
+				resetByPeerCount++
+			default:
+				require.NoError(t, genErr, "%d request failed", i)
+			}
+
+			slog.Info("embed finished", "id", i)
+		}(i)
+	}
+	genwg.Wait()
+	slog.Info("generate done, waiting for embeds")
+	embedwg.Wait()
+
+	slog.Info("embeds completed", "success", successCount, "busy", busyCount, "reset", resetByPeerCount, "canceled", canceledCount)
+	require.Equal(t, 0, resetByPeerCount, "Connections reset by peer, have you updated your fd and socket limits?")
+	require.True(t, busyCount > 0, "no requests hit busy error but some should have")
+	require.True(t, canceledCount == 0, "no requests should have been canceled due to timeout")
+
+}
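
The limit this test saturates is the server-side OLLAMA_MAX_QUEUE setting; the t.Setenv call above presumably takes effect because InitServerConnection spawns the server after it is set. A rough sketch of reproducing the same saturation outside the test harness, assuming an ollama binary on PATH; the queue size and model tag are illustrative, and real code should match on the specific "busy" error text the way the test does.

package main

import (
	"context"
	"fmt"
	"os"
	"os/exec"
	"sync"
	"time"

	"github.com/ollama/ollama/api"
)

func main() {
	// Start a server with a deliberately small request queue.
	srv := exec.Command("ollama", "serve")
	srv.Env = append(os.Environ(), "OLLAMA_MAX_QUEUE=4")
	if err := srv.Start(); err != nil {
		panic(err)
	}
	defer srv.Process.Kill()
	time.Sleep(2 * time.Second) // crude wait for the server to come up

	client, err := api.ClientFromEnvironment()
	if err != nil {
		panic(err)
	}
	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
	defer cancel()

	// Fire more embedding requests than the queue can hold and count failures.
	var wg sync.WaitGroup
	var mu sync.Mutex
	failed := 0
	for i := 0; i < 32; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			_, err := client.Embeddings(ctx, &api.EmbeddingRequest{
				Model:  "all-minilm", // illustrative model tag
				Prompt: "why is the sky blue?",
			})
			if err != nil {
				mu.Lock()
				failed++ // in practice, inspect err for the "busy" message as the test does
				mu.Unlock()
			}
		}()
	}
	wg.Wait()
	fmt.Println("requests that errored (likely queue-busy rejections):", failed)
}
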
diff --git a/integration/model_arch_test.go b/integration/model_arch_test.go
new file mode 100644
index 0000000..6ce183d
--- /dev/null
+++ b/integration/model_arch_test.go
@@ -0,0 +1,184 @@
+//go:build integration && models
+
+package integration
+
+import (
+	"context"
+	"encoding/json"
+	"fmt"
+	"io/ioutil"
+	"log/slog"
+	"os"
+	"path/filepath"
+	"strconv"
+	"strings"
+	"testing"
+	"time"
+
+	"github.com/ollama/ollama/api"
+	"github.com/ollama/ollama/format"
+)
+
+var (
+	started    = time.Now()
+	chatModels = []string{
+		"granite3-moe:latest",
+		"granite-code:latest",
+		"nemotron-mini:latest",
+		"command-r:latest",
+		"gemma2:latest",
+		"gemma:latest",
+		"internlm2:latest",
+		"phi3.5:latest",
+		"phi3:latest",
+		// "phi:latest", // flaky, sometimes generates no response on first query
+		"stablelm2:latest", // Predictions are off, crashes on small VRAM GPUs
+		"falcon:latest",
+		"falcon2:latest",
+		"minicpm-v:latest",
+		"mistral:latest",
+		"orca-mini:latest",
+		"llama2:latest",
+		"llama3.1:latest",
+		"llama3.2:latest",
+		"llama3.2-vision:latest",
+		"qwen2.5-coder:latest",
+		"qwen:latest",
+		"solar-pro:latest",
+	}
+)
+
+func TestModelsGenerate(t *testing.T) {
+	softTimeout, hardTimeout := getTimeouts(t)
+	slog.Info("Setting timeouts", "soft", softTimeout, "hard", hardTimeout)
+	ctx, cancel := context.WithTimeout(context.Background(), hardTimeout)
+	defer cancel()
+	client, _, cleanup := InitServerConnection(ctx, t)
+	defer cleanup()
+
+	// TODO use info API eventually
+	var maxVram uint64
+	var err error
+	if s := os.Getenv("OLLAMA_MAX_VRAM"); s != "" {
+		maxVram, err = strconv.ParseUint(s, 10, 64)
+		if err != nil {
+			t.Fatalf("invalid  OLLAMA_MAX_VRAM %v", err)
+		}
+	} else {
+		slog.Warn("No VRAM info available, testing all models, so larger ones might timeout...")
+	}
+
+	for _, model := range chatModels {
+		t.Run(model, func(t *testing.T) {
+			if time.Since(started) > softTimeout {
+				t.Skip("skipping remaining tests to avoid excessive runtime")
+			}
+			if err := PullIfMissing(ctx, client, model); err != nil {
+				t.Fatalf("pull failed %s", err)
+			}
+			if maxVram > 0 {
+				resp, err := client.List(ctx)
+				if err != nil {
+					t.Fatalf("list models failed %v", err)
+				}
+				for _, m := range resp.Models {
+					if m.Name == model && float32(m.Size)*1.2 > float32(maxVram) {
+						t.Skipf("model %s is too large for available VRAM: %s > %s", model, format.HumanBytes(m.Size), format.HumanBytes(int64(maxVram)))
+					}
+				}
+			}
+			// TODO - fiddle with context size
+			req := api.GenerateRequest{
+				Model:  model,
+				Prompt: "why is the sky blue?",
+				Options: map[string]interface{}{
+					"temperature": 0,
+					"seed":        123,
+				},
+			}
+			anyResp := []string{"rayleigh", "scattering", "atmosphere", "nitrogen", "oxygen"}
+			DoGenerate(ctx, t, client, req, anyResp, 120*time.Second, 30*time.Second)
+		})
+	}
+}
+
+func TestModelsEmbed(t *testing.T) {
+	softTimeout, hardTimeout := getTimeouts(t)
+	ctx, cancel := context.WithTimeout(context.Background(), hardTimeout)
+	defer cancel()
+	client, _, cleanup := InitServerConnection(ctx, t)
+	defer cleanup()
+
+	// TODO use info API eventually
+	var maxVram uint64
+	var err error
+	if s := os.Getenv("OLLAMA_MAX_VRAM"); s != "" {
+		maxVram, err = strconv.ParseUint(s, 10, 64)
+		if err != nil {
+			t.Fatalf("invalid  OLLAMA_MAX_VRAM %v", err)
+		}
+	} else {
+		slog.Warn("No VRAM info available, testing all models, so larger ones might timeout...")
+	}
+
+	data, err := os.ReadFile(filepath.Join("testdata", "embed.json"))
+	if err != nil {
+		t.Fatalf("failed to open test data file: %s", err)
+	}
+	testCase := map[string][]float64{}
+	err = json.Unmarshal(data, &testCase)
+	if err != nil {
+		t.Fatalf("failed to load test data: %s", err)
+	}
+	for model, expected := range testCase {
+
+		t.Run(model, func(t *testing.T) {
+			if time.Since(started) > softTimeout {
+				t.Skip("skipping remaining tests to avoid excessive runtime")
+			}
+			if err := PullIfMissing(ctx, client, model); err != nil {
+				t.Fatalf("pull failed %s", err)
+			}
+			if maxVram > 0 {
+				resp, err := client.List(ctx)
+				if err != nil {
+					t.Fatalf("list models failed %v", err)
+				}
+				for _, m := range resp.Models {
+					if m.Name == model && float32(m.Size)*1.2 > float32(maxVram) {
+						t.Skipf("model %s is too large for available VRAM: %s > %s", model, format.HumanBytes(m.Size), format.HumanBytes(int64(maxVram)))
+					}
+				}
+			}
+			req := api.EmbeddingRequest{
+				Model:  model,
+				Prompt: "why is the sky blue?",
+				Options: map[string]interface{}{
+					"temperature": 0,
+					"seed":        123,
+				},
+			}
+			resp, err := client.Embeddings(ctx, &req)
+			if err != nil {
+				t.Fatalf("embeddings call failed %s", err)
+			}
+			if len(resp.Embedding) == 0 {
+				t.Errorf("zero length embedding response")
+			}
+			if len(expected) != len(resp.Embedding) {
+				expStr := make([]string, len(resp.Embedding))
+				for i, v := range resp.Embedding {
+					expStr[i] = fmt.Sprintf("%0.6f", v)
+				}
+				// When adding new models, use this output to populate the testdata/embed.json
+				fmt.Printf("expected\n%s\n", strings.Join(expStr, ", "))
+				t.Fatalf("expected %d, got %d", len(expected), len(resp.Embedding))
+			}
+			sim := cosineSimilarity(resp.Embedding, expected)
+			if sim < 0.99 {
+				t.Fatalf("expected %v, got %v (similarity: %f)", expected[0:5], resp.Embedding[0:5], sim)
+			}
+		})
+	}
+
+}
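
cosineSimilarity is a helper defined elsewhere in the integration package, not in this file. For reference, a plain version of the computation it performs (the actual helper may differ in naming or numeric details):

package integration

import "math"

// cosineSimilarity returns dot(a, b) / (|a| * |b|); values near 1.0 mean the two
// embeddings point in nearly the same direction. It assumes len(a) == len(b),
// which the test verifies before calling it.
func cosineSimilarity(a, b []float64) float64 {
	var dot, normA, normB float64
	for i := range a {
		dot += a[i] * b[i]
		normA += a[i] * a[i]
		normB += b[i] * b[i]
	}
	if normA == 0 || normB == 0 {
		return 0
	}
	return dot / (math.Sqrt(normA) * math.Sqrt(normB))
}
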
diff --git a/integration/quantization_test.go b/integration/quantization_test.go
new file mode 100644
index 0000000..af9da0b
--- /dev/null
+++ b/integration/quantization_test.go
@@ -0,0 +1,130 @@
+//go:build integration && models
+
+package integration
+
+import (
+	"bytes"
+	"context"
+	"fmt"
+	"log/slog"
+	"strings"
+	"testing"
+	"time"
+
+	"github.com/ollama/ollama/api"
+)
+
+func TestQuantization(t *testing.T) {
+	sourceModels := []string{
+		"qwen2.5:0.5b-instruct-fp16",
+	}
+	quantizations := []string{
+		"Q8_0",
+		"Q4_K_S",
+		"Q4_K_M",
+		"Q4_K",
+	}
+	softTimeout, hardTimeout := getTimeouts(t)
+	started := time.Now()
+	slog.Info("Setting timeouts", "soft", softTimeout, "hard", hardTimeout)
+	ctx, cancel := context.WithTimeout(context.Background(), hardTimeout)
+	defer cancel()
+	client, _, cleanup := InitServerConnection(ctx, t)
+	defer cleanup()
+
+	for _, base := range sourceModels {
+		if err := PullIfMissing(ctx, client, base); err != nil {
+			t.Fatalf("pull failed %s", err)
+		}
+		for _, quant := range quantizations {
+			newName := fmt.Sprintf("%s__%s", base, quant)
+			t.Run(newName, func(t *testing.T) {
+				if time.Since(started) > softTimeout {
+					t.Skip("skipping remaining tests to avoid excessive runtime")
+				}
+				req := &api.CreateRequest{
+					Model:        newName,
+					Quantization: quant,
+					From:         base,
+				}
+				fn := func(resp api.ProgressResponse) error {
+					// fmt.Print(".")
+					return nil
+				}
+				t.Logf("quantizing: %s -> %s", base, quant)
+				if err := client.Create(ctx, req, fn); err != nil {
+					t.Fatalf("create failed %s", err)
+				}
+				defer func() {
+					req := &api.DeleteRequest{
+						Model: newName,
+					}
+					t.Logf("deleting: %s -> %s", base, quant)
+					if err := client.Delete(ctx, req); err != nil {
+						t.Logf("failed to clean up %s: %s", req.Model, err)
+					}
+				}()
+				// Check metadata on the model
+				resp, err := client.Show(ctx, &api.ShowRequest{Name: newName})
+				if err != nil {
+					t.Fatalf("unable to show model: %s", err)
+				}
+				if !strings.Contains(resp.Details.QuantizationLevel, quant) {
+					t.Fatalf("unexpected quantization for %s:\ngot: %s", newName, resp.Details.QuantizationLevel)
+				}
+
+				stream := true
+				genReq := api.GenerateRequest{
+					Model:     newName,
+					Prompt:    "why is the sky blue?",
+					KeepAlive: &api.Duration{Duration: 3 * time.Second},
+					Options: map[string]any{
+						"seed":        42,
+						"temperature": 0.0,
+					},
+					Stream: &stream,
+				}
+				t.Logf("verifying: %s -> %s", base, quant)
+
+				// Some smaller quantizations can cause models to have poor quality
+				// or get stuck in repetition loops, so we stop as soon as we have any matches
+				anyResp := []string{"rayleigh", "scattering", "day", "sun", "moon", "color", "nitrogen", "oxygen"}
+				reqCtx, reqCancel := context.WithCancel(ctx)
+				defer reqCancel() // release the request context even if no keyword ever matches
+				atLeastOne := false
+				var buf bytes.Buffer
+				genfn := func(response api.GenerateResponse) error {
+					buf.Write([]byte(response.Response))
+					fullResp := strings.ToLower(buf.String())
+					for _, resp := range anyResp {
+						if strings.Contains(fullResp, resp) {
+							atLeastOne = true
+							t.Log(fullResp)
+							reqCancel()
+							break
+						}
+					}
+					return nil
+				}
+
+				done := make(chan int, 1) // buffered so the generate goroutine can exit even if the outer context ends first
+				var genErr error
+				go func() {
+					genErr = client.Generate(reqCtx, &genReq, genfn)
+					done <- 0
+				}()
+
+				select {
+				case <-done:
+					if genErr != nil && !atLeastOne {
+						t.Fatalf("failed with %s request prompt %s ", genReq.Model, genReq.Prompt)
+					}
+				case <-ctx.Done():
+					t.Error("outer test context done while waiting for generate")
+				}
+
+				t.Logf("passed")
+
+			})
+		}
+	}
+}
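
Outside the test harness, the same quantize-on-create flow can be driven with the public client. A minimal sketch, assuming a running server that already has the fp16 source model available; the destination tag is illustrative.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/ollama/ollama/api"
)

func main() {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		log.Fatal(err)
	}
	req := &api.CreateRequest{
		Model:        "qwen2.5-0.5b-q4km", // name for the new quantized model (illustrative)
		From:         "qwen2.5:0.5b-instruct-fp16",
		Quantization: "Q4_K_M",
	}
	// The progress callback receives status updates as the model is quantized and written.
	if err := client.Create(context.Background(), req, func(p api.ProgressResponse) error {
		fmt.Println(p.Status)
		return nil
	}); err != nil {
		log.Fatal(err)
	}
}
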
diff --git a/integration/testdata/embed.json b/integration/testdata/embed.json
new file mode 100644
index 0000000..a6ef65a
--- /dev/null
+++ b/integration/testdata/embed.json
@@ -0,0 +1,21 @@
+{
+    "all-minilm:latest": [0.010071031, -0.0017594865, 0.050072223, 0.046929732, 0.05491682, 0.008599705, 0.105441436, -0.025878143, 0.1295813, 0.031952355, -0.04448072, -0.0089852745, -0.000509909, -0.06374169, -0.016089523, 0.04662509, -0.022060998, -0.15813895, -0.072848774, -0.061321855, -0.065877646, 0.054177605, -0.06213012, 0.038908366, -0.04580116, 0.05493584, -0.035267256, 0.012613296, 0.04251382, -0.007927403, -0.01902945, 0.060983833, 0.036926776, 0.013464811, -0.025808964, -0.043487485, 0.072623335, -0.04850803, 0.00428558, -0.02943825, -0.02913489, -0.03290691, -0.018345183, 0.0155583285, -0.011713048, 0.01530367, -0.009391865, 0.025963927, 0.09527476, -0.015497632, -0.024581224, 0.009084283, -0.07661165, 0.015987588, 0.049554788, 0.115980916, 0.0009802427, -0.02031978, 0.09233272, 0.00849488, -0.05705784, 0.068866335, -0.076607056, 0.06931919, 0.09223656, -0.055486195, -0.053620946, 0.008443246, -0.06315959, -0.066396914, -0.02516728, 0.018891005, 0.061389998, -0.028247874, 0.036244337, 0.0011042351, 0.06067215, -0.06755123, -0.008126048, -0.012737444, 0.030953258, -0.06380051, -0.07451028, 0.1191656, 0.012553826, 0.06532671, 0.014824665, 0.051425762, -0.08518537, 0.010257597, -0.0077732494, -0.035585348, -0.115389846, -0.03066639, -0.0832527, 0.013689985, 0.056588713, -0.040882625, 0.042672798, 0.022154681, 0.04685385, -0.05135596, 0.030175874, 0.007199854, -0.0041790465, -0.031146567, 0.07788334, 0.034205843, 0.06138031, 0.007510951, -0.036251485, -0.08457674, 0.021795211, -0.019397866, -0.03984967, 0.054795727, -0.033695232, 0.018102817, -0.10553994, -0.050397146, -0.011542906, 0.0378195, 0.022170838, 0.08049212, 0.007816837, -0.01683443, -0.059413332, -7.227309e-33, 0.13531439, -0.011213897, 0.0923026, 0.03597459, 0.039638437, -0.054985173, -0.03506899, -0.0037263383, -0.01955998, -0.034966808, -0.0057084337, -0.014629069, -0.024276787, -0.048383784, 0.04777095, -0.017076956, -0.06094759, 0.0059446157, -0.083057985, 0.084341705, -0.1046656, 0.041639294, -0.03668315, -0.008083383, -0.028216336, -0.04319357, 0.035999607, 0.07498755, 0.05645381, 0.011849057, 0.09846523, 0.10484252, -0.021864949, 0.045994766, -0.026346037, -0.05092382, -0.014708711, -0.0063834875, -0.085867085, 0.028602734, -0.0535738, 0.056528863, -0.059763853, 0.012410302, 0.06620772, -0.013472636, 0.038324803, -0.08890202, -0.05744544, 0.03199372, -0.034495477, 0.02363032, 0.014458106, -0.04159657, 0.06799366, 0.031207295, 0.069696635, -0.035037853, -0.0033100948, 0.0493309, -0.0133445235, -0.0034971808, 0.050776623, 0.078672916, 0.037620574, -0.011580864, 0.03812419, 0.04201406, -0.012800006, -0.07894726, 0.00902281, 0.013365969, 0.024159499, 0.009777319, -0.010906574, -0.08161233, 0.026987134, -0.0296618, -0.004335468, 0.013011258, -0.035210665, -0.019684888, 0.055351324, -0.06124218, -0.055006765, 0.012528419, -0.019175794, -0.012560324, -0.015807373, -0.06942039, -0.044893157, -0.048941795, 0.048249032, -0.10446324, -0.10786195, 3.58774e-33, -0.0004694524, -0.08636079, -0.087853074, 0.0071707284, -0.007416128, -0.01662082, 0.045272738, 0.06750471, -0.042886123, 0.08635933, 0.04555289, 0.06798365, 0.009930444, -0.003040414, 0.058509175, -0.035567205, 0.036180507, 0.06615616, -0.03779808, -0.062269486, -0.044531893, 0.07724946, 0.04343241, -0.021267718, -0.021633657, 0.06227748, -0.03914136, 0.028114952, -0.013057723, 0.051113747, -0.036822543, 0.054577183, -0.06644743, 0.022884717, 0.0048167957, 0.09043401, 0.0051002423, -0.083096094, -0.055096727, 0.07315016, -0.11049671, -0.020257315, 0.11254063, 
-0.053299136, -0.057593238, -0.023905706, 0.056623034, 0.12725255, 0.03595934, -0.043950673, 0.017003251, -0.024837377, 0.07269714, 0.043164223, 0.08047665, -0.019504813, -0.034397744, 0.096689135, 0.051885936, 0.010750518, 0.04023374, 0.0021946214, -0.0075854477, 0.0016714911, 0.014185944, 0.020396275, -0.023103109, 0.021491585, -0.009236667, -0.050526038, -0.016258504, -0.0899585, -0.0606858, 0.08100888, 0.0024563652, 0.041595213, 0.043729555, -0.025168482, -0.09529981, 0.088698424, -0.09840905, -0.0048626475, 0.03534257, 0.014159388, -0.06457741, -0.07597705, 0.012412196, -0.050220776, -0.055758025, -0.0569825, -0.018489538, -0.0021951278, -0.002204297, 0.03527849, -0.0547162, -1.430923e-8, -0.007930172, 0.026702108, 0.0022585324, 0.010008593, -0.021680027, -0.02156696, 0.111389145, 0.004639639, 0.03784025, 0.003962226, -0.0668973, -0.028295087, -0.04432231, 0.07120314, 0.018729135, -0.04907397, -0.103948705, -0.043614738, 0.010182222, 0.04179206, -0.013543455, -0.03385163, -0.025069695, -0.013597015, 0.0034274007, 0.033077475, -0.021843424, 0.021919321, 0.07144483, 0.020509098, 0.024436586, 0.035892475, -0.00094983797, -0.061337028, -0.085383, 0.007424564, -0.038788088, 0.07989341, -0.025575982, -0.060451094, 0.060581867, 0.082356565, -0.056705453, 0.0048461547, 0.04513215, 0.023778366, 0.043513518, 0.09104256, -0.05140235, -0.01123021, -0.06885336, 0.007250856, 0.072830714, -0.04336812, 0.025920171, -0.11409155, -0.009537421, 0.022203108, 0.026747186, 0.0037276533, 0.015937949, 0.0035980998, -0.020675266, 0.03354611],
+   	"nomic-embed-text:latest": [0.219890, 1.006650, -3.181164, 0.029981, 0.726579, 2.426980, -0.191078, 0.228469, 0.011652, -0.803823, 0.760813, 1.403515, 2.321094, 1.939129, 0.535624, 0.758475, -0.759854, -0.421067, 1.084963, -0.613761, -1.276195, -0.985036, 0.374207, -0.793580, 1.439871, 0.977758, 0.757015, -0.008899, 0.003841, -0.427342, 1.312180, 0.052406, 0.410700, -0.845589, 0.751114, -1.348898, 1.511959, 0.609853, 0.143546, -0.378520, 0.042813, -0.799669, -0.247855, 0.195135, 0.518717, -1.124939, 0.347576, 1.137096, -0.513775, -1.149473, -0.907078, 1.328627, 0.048019, -1.612557, 0.672948, 0.868539, 1.446252, -0.684429, -0.518347, 0.605635, 1.063686, 1.819040, 1.089479, 1.884122, 0.897270, -1.130091, -1.021317, -0.127456, 0.621352, -0.167290, 1.033687, -1.566117, 0.419836, 1.026150, -0.951560, -0.839969, -1.202380, 0.478040, 0.604218, 1.149539, 0.671882, 0.591803, 0.014369, 0.109148, 0.392492, 0.650537, 0.601781, -0.054198, -0.514124, -0.200478, -0.070399, -0.765290, 1.083515, -0.298463, -0.518189, 0.236548, -0.231565, 1.491937, -1.257157, 0.792882, -1.845886, -1.293818, -0.826941, 0.926363, 1.168679, 0.903380, -0.102102, -0.215274, -1.542697, -0.684560, 0.077517, 0.911539, 0.066066, 0.454660, 0.110756, -0.540284, 0.201825, -1.294326, 0.293775, 1.582082, 0.200911, -0.214661, -0.023139, -1.026803, -0.381844, 0.677617, -0.001489, -0.157799, -0.291094, -1.359254, -0.610095, -0.474374, -0.090075, 0.244272, 0.203410, 0.349862, 0.552556, -1.036944, 0.417180, 1.067964, 0.747442, 0.174783, 1.166191, -0.899744, -0.333160, -1.060964, 0.078359, 0.141430, -0.094824, -0.949449, 0.026022, -0.243540, 0.701775, 0.491909, 0.045078, -0.995680, 0.283707, 0.065462, 0.794785, 1.095672, 2.038253, 0.083903, -0.467894, 0.062039, 0.228742, -0.634436, 0.619659, 0.316091, 0.792796, 0.788598, -1.048457, -0.882282, -0.982587, -0.831069, 0.529162, 0.448449, 1.215543, 0.376655, 1.034972, -0.668359, -0.252735, -1.075730, 0.337637, -0.036745, 0.390547, -0.058036, -1.350992, -1.708233, -0.556057, 0.044467, 0.520886, -0.245460, -1.592574, -0.863165, -0.021705, -1.459971, 0.683155, 1.406411, 0.163736, 0.180574, 0.100544, -0.014702, 0.377109, 0.016539, -0.398145, 1.211886, 0.037277, 1.442719, -0.855023, 0.190181, 1.251192, -0.199667, 0.225910, 0.123415, 0.975767, 0.038910, -1.250062, -0.865632, -0.000453, 1.082752, -0.461130, -0.045185, 0.778833, -0.231451, 0.767889, 0.950657, -1.264322, 0.346756, 0.617611, 0.078424, -0.283523, -0.291005, 0.769571, 1.392794, 0.306581, 0.815418, 1.259069, 1.394830, 0.260644, -0.147373, -1.060894, 0.239904, -0.865291, 0.145390, -0.571270, 0.855709, -0.666148, -0.941604, -1.209969, 0.443706, 0.189631, 0.212230, 0.037760, 1.229048, 0.566351, -1.335600, -1.358111, 0.015296, -0.023257, -0.707290, -0.439269, -1.830789, 1.109338, -1.197248, -0.967892, 1.273458, -1.548987, -0.281183, 0.699614, -1.693262, 0.085893, 0.196678, -0.687848, -0.028304, 0.058492, -0.596301, 0.557499, 0.004753, -0.137099, 0.532887, -0.789263, -0.033352, -1.557578, 0.031022, 0.143313, 0.782732, -0.167772, 1.441613, 0.481370, 0.153674, 1.275666, 0.368378, 0.219889, 1.908404, -0.539913, -0.220301, 0.835939, 0.074274, 0.168754, -0.670483, 0.250074, 0.250606, 1.282066, 0.122452, -0.409138, 0.477485, 0.665960, 0.264513, 0.244462, -0.766134, -0.387109, 1.491521, -0.595464, 1.445272, -1.341694, 1.346380, -0.015559, 0.169699, 0.473802, -0.251522, 0.128870, -0.452314, -0.106985, 0.357785, 0.768601, -0.186836, -0.733263, 0.532873, -0.046556, -1.379303, -0.328394, 0.724661, -0.046880, -0.420221, -1.176509, 0.077040, -0.160980, 
-1.457012, -0.374469, 0.360446, 1.199879, -0.950283, -0.311770, -0.861221, -0.326219, -0.886959, -0.019173, -0.804603, -0.145110, 0.172603, -0.111011, -0.129521, 0.241285, 0.170361, -0.443625, -0.021753, 0.072633, 0.563395, 0.942547, -0.110138, 0.532285, -0.431836, -0.212545, 0.241099, -0.942158, 0.600449, -0.645427, -0.361387, -0.532849, -0.328790, -0.359031, 0.155564, -1.010012, 0.878506, 1.179544, 0.451060, -0.587246, -1.473734, 0.742507, -0.152667, -0.089154, 0.725924, 0.448145, 1.327408, 0.481748, 0.517749, -0.439087, -0.767327, -0.317562, -0.963798, -0.155295, -1.611076, -0.736760, -0.141913, 0.696455, -0.501021, 1.009650, -0.417100, 0.248805, -0.218522, -0.882274, -0.762248, -0.474836, -1.111332, -0.472195, 1.026389, -0.091083, -1.468349, 1.588396, 1.459536, 0.872679, 1.189462, -0.020465, -2.652960, 0.836097, -0.108802, 0.503339, 0.487444, 0.288796, -0.149230, 1.723669, 1.326712, 0.150303, -0.715567, 1.521041, 0.839058, -0.236126, 0.348449, -0.189743, -0.999338, 0.454978, -0.113078, -0.355967, 0.219733, -1.678462, 0.925221, 0.281349, 0.502747, 0.485526, 2.215123, 1.225689, -1.572290, -0.183297, -1.053326, 1.243057, 1.127326, 0.425482, -1.167636, -1.324311, 0.328010, -0.564570, -1.026676, 0.735754, 1.924423, 0.977113, -0.185825, -0.304335, 0.084210, 1.203354, 1.377207, 1.050977, -1.086157, -1.124066, 0.352433, -0.216424, 0.038128, -0.433892, -0.006419, 0.739968, 0.963349, -0.676059, 0.439721, 0.450345, -0.276241, -1.303619, -0.198606, -0.970008, -0.201385, 1.530513, 0.599346, 0.570385, 0.476559, -0.374782, -0.297255, -0.753806, 1.497056, 0.300627, -0.404046, 0.568865, -0.990389, 0.425690, -0.933574, 1.065915, -0.385244, -0.177116, -1.063580, 0.178428, 0.058992, 0.827651, -0.473994, 0.331265, 0.657876, 0.640843, 0.698361, 0.549673, 0.232483, 0.668623, -1.197152, 0.480929, 1.240717, -0.745810, -1.905026, 0.767095, 0.103786, 1.170524, -0.952408, -0.066748, 0.424861, -0.136255, -0.025224, -0.052945, -0.597771, -0.454715, 0.413931, -1.125364, 0.801601, 0.484465, -0.863046, 0.302083, -0.079536, 0.068875, 0.691379, -0.627793, -0.914694, 0.324031, -0.494108, -0.020750, 1.394933, -0.204252, 1.174026, 0.317049, -0.210559, -0.029823, -0.078657, 0.032332, 0.314897, -1.028550, 0.739355, 1.110153, -0.692912, -0.101564, -0.972804, 0.442737, 0.069138, 0.823943, -0.751347, -0.623284, 0.416302, -0.447849, -0.865251, 0.737254, 0.175767, -0.142116, 0.411459, 1.320562, -0.663323, -0.096259, 0.933196, 1.470701, 0.054877, -0.011785, 0.194116, 0.802962, -0.041178, -0.537894, 0.249019, -0.221067, 0.309599, -1.719162, -0.104423, 0.930303, -0.444503, 0.571955, -0.573695, 0.011215, -0.108491, 0.195606, -1.299110, 1.023715, -0.983640, 0.057186, -0.391953, -0.187306, -0.730985, -0.521627, -0.948246, -0.939280, -0.765175, -1.353228, -1.173991, 0.427141, -1.228944, 1.202880, -0.772638, 0.416629, 0.898097, 0.380402, -0.805034, 0.552129, -0.475897, 0.037261, -0.770310, -0.356166, 0.235771, 0.062039, -0.848198, 1.381184, 0.260680, -0.442387, -1.896000, -0.225495, -1.096121, 0.587856, -0.626971, 1.399457, -0.922987, -1.204145, -0.282807, 0.686579, 1.160958, -0.841193, 1.676645, -1.138743, -0.556020, 0.221771, 0.289905, -0.421652, -0.062543, -0.244776, -0.021300, -0.304124, -0.098590, 0.832550, -0.258847, 0.612432, -0.492680, 0.533596, -0.473908, 0.358118, -0.810444, 0.516658, 0.936809, 0.800127, -1.173993, 0.241092, -0.648681, -0.329719, -0.172401, -0.325285, -2.102810, -0.545753, -0.375017, -2.974661, 0.398467, 1.105510, -0.362389, 0.570450, -0.183192, -1.592220, -0.146791, -0.998270, 1.052159, -1.014563, -0.357075, 
-0.615475, 0.066042, -0.050603, 1.268924, 1.111394, -0.442363, 0.936902, 0.695246, 0.272335, -0.646006, -1.184614, -0.703524, 0.120704, -0.054029, -0.235635, -2.374027, 0.342080, 0.635537, -1.104060, -1.589049, 0.146446, -0.232117, 0.118681, -0.579416, 0.564839, -0.180279, -1.186865, -0.502420, -0.399287, 1.200750, 0.001391, -0.607492, 0.867889, 0.194823, -0.919726, -0.121487, -0.475471, 0.250438, 0.642583, 0.490459, 0.239351, -0.088616, 0.611746, -0.792151, -1.681509, 0.930524, 1.242115, -0.718563, -0.762689, -1.128326, -0.655312, 0.627445, 0.782836, -1.778404, -0.274353, -0.421497, -0.621240, 0.846464, 0.504188, 0.198827, -1.123407, -1.060399, -0.680443, -0.109405, 0.383880, -0.214442, -1.240156, 0.354756, 0.626841, 0.193950, -0.282054, -0.492465, 0.035170, 0.907206, 1.234352, -0.500964, 0.589432, 0.227899, -0.741226, 0.035833, 0.232761, 0.758834, 0.788271, -0.437591, 2.053334, -0.016972, 0.417164, -0.289397, 0.455565, 0.552883, -0.777599, -0.217818, -0.943074, 0.460138],
+	"granite3-moe:latest":     [5.910233, 30.171839, -14.592036, 2.837715, 14.968947, 7.973449, -1.947099, 22.603910, 5.753875, 28.569792, -29.349314, -8.091486, 11.312101, 1.635193, -5.761543, 1.040875, 11.834714, -9.726265, 32.092159, -35.898239, 5.296179, -1.744865, 10.235253, 8.739743, -18.815981, -21.212708, -2.026226, -11.171863, 4.101085, 5.968066, 5.965814, -15.224704, 10.082129, -15.279737, 18.013292, -2.612643, 10.464241, -4.025630, -23.993250, 10.991261, -5.576319, 16.998455, 12.869885, 1.175003, 8.280812, 12.199456, 5.665783, 17.075645, -21.379660, 134.591949, 17.959675, 25.953827, 24.120113, 17.409477, -3.193029, 11.241851, 9.530139, -12.056103, -4.452151, -0.271666, -8.649167, -12.428501, 11.823528, -2.827536, 6.282444, 2.521258, 81.461189, 15.298338, -6.584094, 6.603992, -9.683152, -20.925825, 14.799984, -22.270163, 5.907921, -0.833670, -0.667122, 2.703424, 8.369345, -29.402857, -5.762743, -4.563978, -14.395526, -5.133063, -0.124120, 21.461061, -46.637238, 21.278778, -13.015121, 6.351894, 0.946589, -1.913780, -23.189226, -22.912931, 2.840406, -1.546906, -7.285774, -46.161957, 14.655342, 22.227030, -1.301839, -29.357885, -4.336789, 92.163307, 31.110840, -12.271099, 133.932693, 11.734908, 8.070328, 13.812112, -9.864179, 8.783020, -7.283248, -3.531120, 1.100977, -12.874575, -10.192871, 7.477422, 10.881320, -9.561343, 14.172203, 4.870072, 2.330342, 17.557465, 3.327132, -9.740596, 11.291760, 2.834996, 6.555978, -11.344123, -6.842443, -18.598501, 0.758174, 27.208895, -4.902824, -10.958056, -55.134621, 5.104919, 9.981282, -18.919510, -2.571935, 6.974890, 3.927437, -19.922615, -2.466738, 15.617038, 7.675040, 23.773939, -11.432803, 10.740471, 2.242301, 2.842329, 10.344113, 27.188694, -1.918746, -13.990616, 16.756920, 35.803310, -78.949562, -4.729366, -1.778806, 13.663913, -15.737623, 12.059778, -2.778493, -32.010658, 26.099768, -13.162955, 4.330981, 1.682542, 18.967260, 21.632246, -9.698850, -0.876216, 7.460126, 31.513733, -9.148410, -9.306043, 11.722775, 9.375770, -4.402502, 0.820594, -2.146913, -0.772945, 4.326170, 3.219532, -3.498803, 3.872662, -2.795810, 21.738985, 8.454886, 3.528073, -51.829544, -7.674875, 29.892965, 4.129654, -14.099564, -10.897354, 1.669887, -13.013097, 13.575606, -17.992901, -14.653707, 0.020252, -11.186063, 13.581324, 19.104755, -23.943918, -12.188995, -606.941040, 26.324015, 16.674185, -245.366394, -21.670475, 18.335369, -11.000136, 9.296625, 10.886216, 20.467735, -13.730722, -20.725361, 4.470503, 11.960427, 4.898565, 12.459598, -14.548553, 4.149211, -15.506085, -18.006060, 13.561481, -20.042671, -22.978138, 6.908902, 1.049820, -0.854378, 1.581094, 0.374944, -9.595409, -3.925668, -6.747450, -10.677427, 6.557892, -6.434878, 5.401307, 13.277419, -2.632324, -0.683253, 1.979805, -5.869758, 8.772308, 12.582617, -10.796289, 19.006266, -17.695400, -4.079504, -12.024731, 8.692499, 1.124261, -7.884833, -16.905472, 15.445214, -14.615550, 3.221821, 0.610939, -20.212936, 19.146111, 20.472776, 1.601480, 0.468653, 3.196803, 1.815385, 13.413655, -22.140823, 60.012062, -1.340733, 3.945969, -11.371457, -1.361438, -3.070006, 11.228428, -5.329631, -144.731705, 5.585932, 12.705352, -35.361210, 5.402887, -22.210894, -11.764082, -10.529637, -1.680632, 0.978505, 2.944480, 20.638897, -6.945477, -7.315181, 11.820772, -34.066574, -18.815998, 7.285862, -310.943420, 10.461345, 8.083133, -17.976057, -41.162441, -10.750346, 21.369474, -12.099899, -2.343555, -1.060179, 5.924515, -27.179865, 27.975437, -24.421137, 8.659485, -11.785131, -8.149186, -16.439056, 18.028307, 16.566832, -3.038346, 
-5.568111, 10.469731, -15.127536, 3.346772, 3.865383, 17.047647, 2.386874, -22.667032, 2.163410, 4.887003, -34.768108, -20.684711, -2.799956, 11.562538, 10.669291, -9.255035, -15.569283, 4.301740, 3.707626, -3.648189, -4.176328, -60.779106, -11.225652, 19.511650, 1.790995, 5.985159, 3.463223, -12.580428, 16.799856, 5.869790, -10.124368, 15.806347, 2.647198, -30.748787, 19.855984, -4.171432, -11.744167, -22.470293, 24.969671, 11.727280, 15.210749, 8.970414, 16.108089, 8.825900, -14.952545, -2.905712, 3.212929, 5.783626, 3.144367, 7.653569, 8.793571, 18.702265, 32.081905, -7.501697, 5.425734, 9.319864, 18.756527, -0.797716, 24.006330, -10.961069, -10.404222, 19.896885, -9.230650, 4.023786, 2.242430, 11.963447, -21.049240, 28.664921, -0.018579, -6.656789, 5.994405, 0.081606, 16.420298, -2.969083, 19.234186, 35.404984, 2.104683, -6.588911, -4.507048, 11.724409, 0.875683, -14.133551, 14.877223, -14.408369, 18.683701, -1.446610, 9.133441, -7.892530, -6.295300, 9.652716, -20.884869, 21.768879, -12.346772, -3.359479, 6.986233, 13.784170, 0.572282, 7.517334, -8.316103, -13.257382, 0.193735, 4.251532, 3.993173, 7.994923, 7.460558, -4.763490, 0.509111, -4.459810, -5.884681, 6.396923, 19.855288, -24.027454, -12.866510, 20.686476, -11.135347, 20.923820, 16.078331, -2.949684, -1.226427, 8.968578, 22.852449, -29.107840, 6.632169, 5.622813, 1.041288, -5.823139, -4.928956, 18.278795, 33.794529, 4.684862, 22.151001, -7.426543, -13.763552, -16.645988, -22.810810, 2.881028, -5.612767, 11.180967, -1.649996, -4.432179, 27.134329, -0.441665, 8.890188, 2.353519, 23.824448, 10.895750, -13.273428, -22.800825, -3.511435, -27.803415, -12.941121, 11.970799, 15.007806, -0.841761, -28.025873, -105.639389, 2.059234, -14.899364, -2.124423, -16.817869, 10.499662, 10.596079, 18.546129, 8.066932, 12.004951, 22.481850, -15.728971, -12.705195, 6.798643, 14.554914, 0.320640, -17.136030, -63.131405, -7.932780, -11.177350, 1.902833, -17.347900, -4.074312, 2.654741, 17.940918, -9.682328, -9.935791, -13.366525, -41.804581, 4.703305, -17.044256, 20.260994, -10.287923, 4.194438, 3.924806, -6.371835, 1.905040, -8.786492, -2.804256, -13.645334, 16.522154, 1.133029, -8.615473, -29.775444, 7.035748, 1.730713, -0.813137, 16.944988, 10.494798, 2.471127, -17.134981, -31.594488, 23.679823, 13.700656, 11.259837, -1.393510, 24.213184, 0.594864, 19.768007, -13.118903, -11.077514, -0.601305, -3.697073, -32.656422, -0.415944, 7.779869, 4.679246, 11.707513, 6.999825, -2.554271, -24.157104, -9.998021, 31.281271, -2.985908, 24.705114, -1.338948, 22.305872, 19.819147, 9.865061, 11.178469, 2.398972, -3.570000, 7.230457, 0.888181, -14.226989, 46.485043, 17.359091, 3.809138, 6.635123, -0.314788, 3.362935, -30.779945, -9.531190, 4.284771, 7.837353, 1.124498, 3.366127, 0.463016, -5.294980, 47.850704, 31.871058, -24.967073, 28.440832, 16.762613, -14.390144, -3.231953, -3.074816, -3.089787, 25.336514, -4.237896, -17.347275, 4.750774, -3.120321, -9.072162, 7.193366, -3.365265, 22.379192, -27.603596, -10.382773, 12.032997, 13.361623, 39.001362, 2.413692, -1.322643, -0.037110, -20.961409, -10.183399, -29.152880, 5.241103, 10.669732, 20.543423, -5.331350, 1.994862, -9.354459, -1.184517, 10.231616, -2.086119, 16.555714, -24.108919, -0.185303, 9.941091, -31.506477, 4.210682, -21.643034, 13.312780, 5.000698, -8.312237, -5.463239, 4.870609, -7.243948, 11.544545, 22.768454, -19.319820, 27.335552, -9.086289, -20.315342, 2.858530, -2.399571, 15.547544, -16.229853, -9.716353, 24.424154, 4.235106, 1.609934, -2.482428, -10.919318, -22.083069, 20.712894, 6.792789, 
-13.999515, -13.862957, 5.544644, -8.237908, -3.620728, -3.646942, 6.877924, 11.945367, -3.056302, 6.354347, -6.600556, -15.953823, -4.270577, 1.169367, -6.677580, 16.452318, 16.953333, 19.365353, -6.327491, 7.413441, -12.910234, 2.577799, 16.230818, -4.133609, -13.569720, -6.301839, -23.760706, -23.358809, 2.195146, 16.855768, -0.563238, 5.755409, -3.455244, -32.581589, 7.362275, 18.226692, 19.073891, -2.889560, 19.596006, -1.847176, -35.965023, 1.004362, -7.027219, -5.233053, -20.280478, 0.991492, 13.779267, 9.769767, -21.226250, 17.940596, 29.440950, -29.128031, 8.452128, -4.824466, 10.684808, 20.092855, 26.152739, 4.438401, -1.690581, 4.477728, 6.524579, 6.030072, 1.748050, 15.232247, -15.253749, -17.199966, 7.050725, 8.969168, -6.712491, 0.262175, 18.948431, -14.762223, 3.300240, -16.243732, 27.153996, 6.637587, 8.960702, 2.332713, 2.129112, -20.963326, -7.114325, 154.496887, 29.787031, 20.236282, -15.849831, -18.543089, 9.013681, -0.718873, 0.173534, 5.531977, 4.930003, -1.656422, -9.749697, -10.176377, 19.374493, -5.686445, -17.527151, 15.001776, 9.464457, -8.424029, -4.525608, 4.965511, 17.492561, -45.308231, -10.879435, -19.515076, -11.785777, 3.658191, -16.178417, 14.068483, -34.595879, -27.803114, 13.434047, 10.387864, 9.177503, -3.742442, 3.387829, -14.570332, -16.079081, -13.967793, -13.029141, 8.431294, 9.493267, -37.916645, 11.751331, -7.741582, -9.540742, -9.780586, -2.392878, -7.884768, 24.516655, 5.469426, 22.973869, 4.623669, 0.818910, -13.362819, 11.305873, 15.217951, 38.241253, -18.920458, -7.783936, -24.702259, 6.128407, -7.217543, 6.566425, 3.321952, 12.285128, 10.420273, -10.466927, -1.137602, -45.324993, -23.398872, -1.858458, 4.114500, 6.052193, -4.522782, 3.243538, -15.193633, -9.601283, 13.992054, -3.657720, -2.701909, -4.004214, -5.377882, 5.377224, 11.000648, -7.405027, 1.843000, 3.031408, 0.600323, -2.970792, 17.768391, 21.035534, 4.459052, -4.868608, 10.544399, 32.276367, -16.770508, -25.057745, 22.813244, -1.189920, 0.185402, -4.412627, -15.375156, 8.816485, -12.502228, 9.948814, -11.768686, -25.030834, -3.497505, 15.223778, 14.495900, 6.307823, 2.048729, 3.727947, 4.793435, 1.462687, -13.884528, 28.467163, -9.824447, 17.221064, -4.340904, 84.522087, -9.192829, 3.444686, 11.529037, -29.989532, -6.438466, -9.154363, 2.159212, -0.047614, 0.761222, 15.104259, -8.908755, 17.157633, 17.249439, -11.561856, 2.290649, -33.708897, 12.175060, 6.480854, 13.553982, 27.790758, -2.050587, -1.667087, -0.347506, -31.598421, -11.870290, -11.244892, -14.819248, -12.020247, 4.971821, -11.470158, 6.001273, -5.779810, 2.686197, 10.615704, -28.309267, -4.003694, -2.492575, 7.409032, 1.473416, 0.224771, 23.014254, -12.801020, -1.441912, 4.213243, -3.654921, 3.744973, 14.405837, 4.242178, -0.559246, -13.136638, 17.801081, -9.959801, -4.565279, -20.750568, -2.337087, -8.057383, 6.145776, -4.304662, -0.613708, -18.388718, 5.646697, 9.464123, -2.128031, -21.369585, -8.266222, -5.082420, 4.194514, -7.263211, -13.140247, -17.395348, 8.898520, -22.150940, 16.164259, -17.093813, -6.663777, 16.258022, 12.335802, -0.267598, -11.785521, 1.338714, 12.239976, -3.571168, -0.146445, -18.020006, 6.885163, 16.264381, -4.918350, 4.971773, 76.457527, -12.357264, 3.178989, 13.972784, -17.338724, -53.524242, -6.375101, -4.122850, -22.754618, 14.133285, -4.418598, 43.380146, -11.148293, 4.401711, 28.880716, 15.529907, -0.676613, -0.171630, 0.536704, -10.389581, 2.925949, 11.855149, -0.448767, -0.929932, -33.834824, 13.853880, 9.716026, 14.485927, 14.891504, 14.833290, -7.500180, 4.391589, -8.655250, 
0.577355, -21.714235, -1.320842, -7.442505, -10.944694, 3.189536, 7.628008, 19.579287, 2.439533, 20.716356, -13.067318, 9.035158, -16.808308, -5.522472, -17.607924, 17.414011, -8.096068, 9.939309, -8.186272, 25.231920, -10.807590, 3.736280, -12.678774, 24.700350, -0.242279, -6.742983, 1.259873, -19.214605, -1.968102, -2.376017, -23.590635, 21.091452, 8.131310, 6.561183, 1.719079, -28.888819, 36.343685, 11.717151, -12.670501, 4.639385, -7.803811, 22.195730, 19.185535, -6.677001, 26.386492, -3.577444, -16.164864, 4.930522, 11.364016, 3.831939, -6.234989, -12.353168, -10.714200, 3.026727, 24.589510, 5.440655],
+	"nemotron-mini:latest": [-4.120038, 0.711547, -0.991387, 0.004026, -1.077477, 2.553976, 2.530914, 2.930088, -1.663075, -9.611784, -7.395068, 1.774247, -1.324328, 73.225052, -5.281722, 2.737439, 2.400243, -1.402780, 1.992445, -4.149514, 2.973329, -0.158553, 0.706131, -0.794330, -10.818738, 2.617805, 1.099990, 2.067191, -2.974320, 0.851727, 0.877344, 2.652516, -0.264260, -4.018307, 0.211724, -3.586028, 1.168983, 5.037290, 1.529527, 8.201462, 1.489855, 0.662053, -5.822824, 9.936276, -12.036245, -1.903112, 0.211410, 18.828714, 0.515774, 3.865259, -1.826139, -2.890222, 10.017738, -2.666493, -2.559016, -1.053113, -0.235626, -2.227849, -3.839067, 3.596564, 4.224329, -1.323692, 5.120552, 2.175209, 0.829468, -4.033612, -2.261686, 5.083168, 0.101488, -3.915002, 3.628062, 1.046779, -0.380685, -0.274417, 2.351975, 15.734816, 0.374099, 0.691106, -0.060621, -2.209861, 0.662186, 7.077675, -3.494924, 2.054063, -2.514411, -8.185169, -5.401649, -0.725978, 1.581109, 1.139995, 1.942631, -2.779927, -2.191670, 2.025638, 0.612604, -0.178798, 0.157385, -2.053472, -8.938956, 0.739186, 1.525432, 0.878664, 1.668942, 5.504779, -5.052747, -1.644298, 1.138673, 3.222665, 1.341606, 2.952670, 0.594875, 4.221618, 0.232396, 0.093857, -0.212279, -1.150683, -2.670379, -2.926181, -2.985442, 0.635515, -0.426584, 2.417369, -1.481988, 2.317802, -2.719497, -1.611657, -1.703588, 0.953786, -0.350103, -1.516849, 8.258697, -4.537434, 10.151460, -0.154285, 0.275394, -2.321293, -2.241937, -0.865318, 3.311065, -0.864827, 0.238390, -10.650764, -0.704152, -0.155314, 0.941014, 0.940291, -1.228785, 1.792918, 0.400272, -0.020393, -2.235857, -2.379023, -0.884208, -1.071011, -0.353613, 1.854456, 0.373228, -2.420345, 2.117029, 0.967106, 0.389013, 0.405657, 0.641711, 2.102300, -0.564671, 1.786480, 1.200751, 2.116862, 1.117827, 1.999030, 0.900137, -1.613231, -3.082652, -0.171636, 16.678534, -2.551064, 17.355364, -2.315114, 2.343719, 4.020091, -0.091294, -1.180222, 2.152392, 1.284814, 1.819574, -0.914603, 0.087546, -0.195623, 0.566840, -0.391695, -1.265887, 0.111707, 0.199983, 3.345609, 0.113387, 3.538256, -1.050486, 0.954481, 0.589756, -4.268962, -0.181804, -1.883747, -1.323785, -1.564519, 0.341528, -0.845928, 0.497681, -2.345117, 3.805112, 0.932985, 0.535690, -0.180280, 0.362456, 1.036545, -0.628692, -0.446748, 1.080820, 11.536877, 0.591646, 1.228665, 0.372442, -0.730955, 2.290035, -2.020121, -1.763714, -2.694391, -1.878185, 0.836524, -0.188292, -3.075404, 0.790510, 1.291605, -0.383334, 1.130402, 0.174084, 1.006510, -0.393040, -0.974748, 3.473970, -0.211168, -0.270774, -0.625871, 1.146878, -6.138976, 1.381635, -1.229375, -1.954015, 0.785005, -0.259023, -1.243465, -3.147345, 0.036017, -0.300099, 1.216044, -2.399574, -0.199756, 0.727305, 0.343471, -1.013342, -1.159037, -0.158584, 0.651164, -1.548232, 0.229590, -1.746092, 1.144064, 0.296701, 1.036485, 1.424864, 2.413066, 1.017853, -3.336513, 6.409276, 0.828699, 2.074002, -1.148892, 1.069268, 1.794530, 0.473608, -1.042171, -4.028618, -0.823841, 45.092232, 0.499238, -3.086152, 1.981941, 3.784814, 1.625841, 0.185681, 1.623077, 1.301530, 10.822767, -0.671696, 0.794590, -0.879296, 1.547901, -0.376069, 1.050125, 2.094375, -0.981282, 0.385528, 1.449807, -0.987496, -1.290804, 2.214968, 0.314758, -0.956137, -19.336226, -1.869080, -0.173513, 1.197664, -2.100749, 0.455114, 1.793306, -0.289776, 8.657969, -3.828034, 2.115680, -1.153528, 2.196890, 0.552541, 0.225195, -0.810835, 0.276522, -0.774294, -1.171754, 0.850747, 1.342212, -0.524275, -0.331715, -0.946968, -0.507740, 0.222476, -0.987027, -1.894623, 
0.522413, -0.205396, 0.040288, -1.290450, -0.977973, -1.869196, 1.261484, -1.666716, -0.284608, -0.436559, -0.545000, -0.715979, -0.166126, 0.670192, 0.940825, -2.828389, -1.134436, -0.899325, 2.032113, 0.320457, -4.589130, 8.592949, 0.534099, 1.087636, 1.638817, 0.974161, 3.443498, 13.883852, 1.441055, -2.571116, 2.594905, -0.364179, -0.294388, 2.921740, -0.786495, 0.846024, 1.279914, 0.683920, -0.413425, 0.507789, 2.832381, -0.448878, -0.343657, -0.303370, -0.161274, -2.049919, -1.201430, -1.143355, 0.977756, 0.662818, 2.506859, -1.170880, 1.953620, -2.775410, 0.015006, -0.479959, -7.531682, 0.899604, 2.448596, 2.964283, -1.932552, 3.637474, 0.381634, -0.179009, -1.792929, -8.571464, 1.107289, 0.402882, -0.189643, -0.238225, -0.905025, -1.205879, -13.095944, 2.613966, -0.171709, 6.627252, -1.115685, 2.353951, 0.132741, -1.710857, -2.648761, -0.796763, -0.522527, 0.585942, -1.651109, 0.255303, 0.537679, 0.975097, -1.685947, -2.443757, 1.417167, -0.510178, -0.198517, 0.937678, 0.789442, 0.840620, -1.329556, -0.172105, -0.440448, -0.602640, -0.109061, 0.651741, -12.457002, 0.059279, -1.437292, 0.538130, 0.311163, 1.682207, -1.106353, 0.227294, -2.364755, 6.533086, -0.738967, -1.399149, 3.559586, -1.284343, -0.219988, 0.876421, 0.677565, -18.688902, 0.853895, -0.000468, 1.992689, 2.193721, -0.362417, -1.373153, 1.063549, -1.210380, 0.156591, -0.620162, 0.880547, -0.036021, 0.034574, 0.631643, 2.201981, 1.394046, 2.824895, 0.577892, 0.768444, 1.934837, -0.434852, -1.415033, 1.258731, -1.099884, 3.263321, 0.534831, -0.107584, 3.674846, 2.530393, 0.900507, 0.172028, -1.092034, 1.853497, -1.588007, 1.578310, -2.032119, 0.472409, 1.753164, 0.327808, -0.348128, -1.324746, -2.508019, 1.241464, 0.319598, 0.877602, -2.989245, -3.144064, -2.318311, -0.637586, -0.162224, 0.491977, -0.323674, -1.160390, -2.205703, 0.892704, 1.986179, -0.395600, 2.590883, -0.255823, -1.498204, -1.697398, 0.942510, 0.514705, -3.356550, 2.165736, 1.195359, 1.960565, 1.946751, 4.116686, 1.360731, -0.122520, -0.692262, 0.690307, -0.819340, -0.026968, -3.153666, -0.293139, -2.619861, 1.647953, 0.397924, 2.970011, -0.801167, 1.623489, -2.442073, 0.281765, 2.345005, -0.428580, -0.378712, -1.266423, 0.697442, -3.972456, 0.459115, 0.054410, 2.708170, 1.367932, 0.121947, -0.192998, 1.148419, 1.134884, 0.684813, -3.908074, 0.385640, -2.308028, -3.269670, 0.681865, 1.228514, -2.240721, -0.038122, 2.127180, 1.625306, 1.225985, -0.355789, -0.968112, -0.642233, -0.018510, -0.080948, -0.625026, -0.619176, -1.715348, -0.857318, -0.805670, 1.988237, 0.686941, -0.268361, 2.369648, -0.571147, 0.494377, 0.755837, -1.246764, 1.288619, -0.426436, 1.476565, -1.107682, -0.480250, 0.829176, -0.665775, 12.427722, -1.107234, 0.508904, 1.021439, 2.089632, 1.291023, -1.364481, -0.939930, 3.418002, -2.319383, -3.269610, -1.065671, 1.291446, -0.274947, 0.576194, -2.398307, -0.737248, -0.854351, -2.128317, -1.328655, -0.022095, -1.454497, -0.697447, -3.481141, -2.382665, 1.787495, -0.401200, -2.694632, -1.162223, -0.632749, -2.025328, -0.725091, -0.069712, -2.185089, -0.000626, 1.125538, -1.185506, 0.623551, -0.292109, 1.436726, -0.416528, -0.276014, 0.445017, -0.182922, -2.799006, -1.169948, 3.278847, -0.088762, -0.718230, 0.678400, -8.835315, 0.711337, 4.121616, -0.614538, 2.355267, -0.112793, -0.703143, 0.084732, -1.509933, -0.853429, -0.621376, 1.783902, -0.466435, -1.201708, -1.254324, 2.378747, 0.730460, 0.751666, 2.915919, -0.293372, 1.134014, -1.637401, 1.384194, -1.208763, 1.278465, -3.401009, -0.416853, 1.349236, 0.265887, -0.491655, 0.338902, 
0.046566, 1.604596, -0.828569, 1.914908, 0.603021, 0.668613, 1.035247, -1.976538, 0.547510, 0.741862, -0.712208, -0.140703, -0.306417, 2.992609, 1.154214, -3.117952, -1.204803, 1.705627, -0.341271, -0.724283, 2.879889, -1.396597, 0.326837, -1.837137, -0.145596, 2.602407, -1.475421, -2.564685, 0.153613, -1.007649, 2.566728, 1.798051, -1.111874, -1.914844, -0.251985, -1.032269, -2.417181, 0.386264, 2.147783, 0.609979, 1.213180, -0.877796, 14.091846, 3.847449, 0.423241, -1.325773, 2.689366, -0.681466, 0.303946, 0.536334, -1.924172, -0.537626, -0.211922, -1.426566, 0.108177, 1.366546, -1.519071, 1.384233, -1.265454, -0.874439, -1.977748, 2.847632, -1.154848, 0.280883, -1.014754, -0.756724, 2.138873, -1.008579, 0.834355, 0.203577, 2.042146, 0.453894, 0.127396, -0.615425, 0.499804, 1.003915, -0.702003, 0.017341, -6.025723, -1.420500, -0.687909, 0.596307, 1.167132, -0.038750, -2.649878, 0.592671, -2.787702, -0.196119, 0.063055, 0.668322, -0.615288, 1.128135, 0.236712, 1.931765, 0.324320, -1.003573, 2.949824, -2.625035, -0.616568, -3.071275, 0.683821, 1.501190, -1.695908, 1.724847, -2.192132, 0.929228, 0.788780, 10.747573, 1.629455, -2.474180, -3.146395, 0.859624, 0.774199, -1.251014, 2.078666, 2.501911, 1.471870, -0.297639, 0.461419, -0.439189, 0.285172, 1.070387, -2.302202, -1.006166, 1.847144, 1.137581, 2.586329, 0.419907, -0.220795, 0.216225, -8.465442, 0.099509, 1.474104, -3.087058, 0.495809, 1.102142, -0.809829, -0.959253, -3.254194, 1.518994, 1.416016, 2.448883, 1.156614, -1.985732, -0.743689, 0.775749, 0.676275, -3.917594, 0.841390, 0.533269, -1.898400, 2.786950, -0.650033, -2.154030, -1.252244, 0.675762, -1.768394, 0.496726, 0.008671, -1.109113, 0.511296, -0.684406, 0.123855, -0.970569, 0.687012, -1.307973, -0.268266, 0.212728, -1.362499, -0.984690, 0.688890, -0.090851, 0.284262, -1.270952, -1.581297, -1.226040, -1.528808, -0.386581, 0.856013, 0.004575, 0.941012, -2.166249, 1.776329, 1.837429, 0.628708, -2.208156, -0.374813, 1.076870, 1.776096, 0.382324, -0.488550, -1.710793, 2.189963, -2.193386, 0.093312, -0.845919, 0.104237, 0.710326, -1.325291, -4.074582, 1.386155, -1.362984, 0.410051, 0.012850, -0.968827, -0.408812, -1.409927, -0.287591, 0.247293, 1.112642, -0.587984, 0.325716, -1.847045, -0.020897, 0.189674, 0.602778, -2.612956, 1.027682, 2.356382, 0.865034, -1.407413, -2.106067, -0.822541, -0.933455, 0.477729, -0.495813, 0.537693, -1.861761, -1.403352, -6.387653, 0.705838, 0.470781, -2.941714, 0.783758, -0.259212, 0.886806, 0.850351, 1.435188, -0.549425, -0.036716, 2.705648, 1.418543, -0.313935, -1.051778, -2.252609, -0.280129, 0.694766, 0.761522, -0.271357, -1.351199, 0.640514, 1.083974, 2.511422, -2.037619, -1.219427, -2.328991, -0.820033, -2.001815, -0.951034, -0.098321, -0.849577, -2.076226, -0.997272, 0.595438, 0.946848, 0.758755, -1.450784, -0.305076, -5.650831, -8.792426, 3.355259, -1.786570, 2.729236, -0.378069, 1.466914, -0.833942, 0.370441, -0.244222, 0.412328, 1.553317, -0.930822, 4.017974, 0.073678, 0.974857, -0.160225, 1.702837, 0.350974, 0.863976, 0.198430, -0.704718, 1.089129, -0.970466, -0.562427, -0.765933, 0.419560, 0.141009, 0.808560, -0.106841, 1.581344, -0.773920, -0.169148, 0.952092, -1.016463, -0.930622, -0.270154, 2.687557, -0.144619, -2.424826, -2.775465, -0.119660, -1.743029, 1.909370, -1.058119, -0.395479, -6.764561, -1.881087, -3.378350, 3.582016, -3.627630, -1.611914, 1.979339, -1.333062, -0.443549, 0.659470, -1.481132, 0.129869, 1.579404, -0.498087, 1.202215, 0.517257, 1.187280, -0.495755, -0.530649, -0.748800, -1.750288, -0.609772, 1.921934, 1.126505, 
-0.699985, -0.606027, 0.535565, -2.238772, 1.818144, -0.099951, -1.194464, -1.686393, 3.186570, -0.295847, 2.763192, 0.504193, 1.273901, -1.208753, 0.086733, 0.165759, 1.768771, 0.595038, -1.188871, -2.569773, -1.262780, 0.666044, -1.761325, -0.310582, -1.538806, 0.583538, -1.650966, 0.745306, 0.559141, -0.714205, 0.601608, -2.177805, -0.222916, 1.424731, -1.399902, -0.936130, -2.812508, 3.003098, -2.821995, -1.441686, 0.722480, -0.221376, -2.076210, -1.700804, 1.431648, 1.481000, -0.299278, 4.351317, -0.526738, 0.039587, 2.241846, -0.252646, 2.819084, -3.088144, 1.070554, 1.794986, 2.111658, 0.148549, -0.007432, 1.690574, 1.282853, -0.432758, -1.751136, -1.546041, -0.981039, -1.576312, 1.121798, -0.282309, -3.665804, 7.227891, -0.709919, 2.227319, 0.522394, 0.157321, 1.136676, 1.581767, 1.099154, -0.096848, -0.392499, -2.107337, -2.612769, -0.693609, 1.215889, -0.680887, 0.334657, -0.100493, -0.047590, 0.808193, 0.011965, -1.278237, -0.322477, -0.451016, -0.290758, 1.560038, 0.385432, 0.231185, -0.875851, -0.266539, 0.410902, -0.729591, 1.699829, 0.787876, 1.182208, -0.008651, 2.660361, -0.572185, -0.612873, -0.112692, 3.604835, -0.530817, 0.051921, -5.093953, 0.265582, 0.836985, 0.299400, 3.361667, 0.034345, 3.555858, -0.633714, -3.697381, -0.093796, -2.146292, -0.563349, -0.569491, 1.168705, 3.316072, 2.372404, 0.628388, 0.249071, 0.663080, 1.232120, 0.514750, -1.542467, 1.624973, 3.168562, 0.715801, 0.252510, -1.825370, 0.838917, -0.690693, 1.408648, -0.603600, 0.657941, 0.174603, 1.296732, 0.436804, 0.050912, 2.139699, 1.385786, -4.000784, 0.246543, 0.840831, 0.360396, 0.365326, -1.859958, 1.072193, 4.836280, 5.614607, -0.762466, 0.193026, -0.065408, -1.214958, 0.840172, 0.268356, 1.812064, -0.389973, 0.393329, 4.318853, 0.066308, 1.596426, 0.832660, -0.840357, 0.333118, 0.839722, 0.852698, 2.209760, 0.025591, 2.125010, -1.646916, -0.723286, 3.143192, -1.101914, -1.700010, 1.047884, -0.223058, 2.431010, 1.964614, 0.653134, -0.463696, -0.627593, 2.656858, -0.097684, -1.882960, 1.139552, 1.045971, 2.037109, -2.034312, -1.832128, 0.680087, -0.660167, -1.160689, 0.488568, 0.865070, 1.748245, 0.230193, -1.580301, -1.652970, -2.123022, -0.067249, -0.627098, 0.204704, -1.211521, 1.547984, -5.892817, -2.677408, -4.311819, 1.337836, -1.609935, 2.590586, 1.401891, 1.580933, -0.199859, -1.311303, -0.122473, 0.138223, -0.673348, -1.165293, -1.090712, -0.684587, -0.586424, -0.708054, -3.427097, 1.882552, -0.522692, 0.561334, 0.331448, -0.025046, 0.001716, 0.111988, -1.193842, 0.149182, -1.432872, 0.407708, -1.451432, 0.554744, -0.795857, -0.737003, 1.430273, -1.225829, -1.842758, 0.283813, -1.230769, -0.821415, 0.238414, -1.203064, 0.274010, -0.386080, 0.925102, 3.856348, 1.943906, -1.123320, 0.578006, -0.552951, -1.392412, -0.578234, -1.076848, -0.779367, -0.488024, 0.237059, 0.257224, 0.829635, 1.144735, 0.533952, 4.213201, -0.862246, 3.529323, 1.945243, 0.128751, -0.112346, 0.621396, -1.864277, 2.387606, 0.323706, 0.333739, -0.709678, 3.908322, 0.227164, -0.612416, 0.798259, 0.966086, -0.063895, -0.440826, 0.711594, -0.652940, 0.051804, -0.522274, 1.035364, 0.094041, 0.549716, 0.843440, -3.225272, 2.123960, -1.514610, 2.690585, -0.775321, -0.071940, -0.535653, 1.187980, 0.831565, 1.854406, 0.000889, -0.852992, -0.829247, 0.172915, 0.607090, 2.261234, -0.577887, -0.223631, 0.521219, 0.723837, 0.590144, 1.578582, -1.424523, -0.684811, 0.591908, -1.767461, 1.336581, -0.299392, -1.484266, -0.102010, -1.956955, -1.095171, 0.078446, 0.439536, -0.652268, -0.425554, -0.678455, 2.091768, 1.964156, 
-0.223128, -1.026644, -1.186581, -3.105243, -3.229987, -0.752073, 0.075520, 1.096994, -1.848168, -0.196812, 1.016304, 3.466650, -0.694447, -1.023418, -14.223376, 1.919446, 0.381060, -3.340447, 0.826075, 0.816453, -1.251773, 2.895489, -0.060124, 1.730335, 2.127186, 0.565133, 2.378071, -1.229836, -0.742808, 1.838672, 1.338558, 0.631052, -0.619672, 0.882627, 2.335589, -0.625769, 1.046581, -2.041212, -1.303177, -0.164399, -3.171140, 1.470407, -0.889253, 1.296392, 0.634034, -0.207822, 0.259247, -0.725371, 0.001653, -2.288200, 1.107141, -1.614146, 1.216030, 2.295768, -0.101712, 0.414017, -0.562038, 0.089368, 1.577849, 0.685630, 0.147774, 0.146493, 0.822355, -0.116812, 3.139313, 0.675288, 0.796764, -0.850924, -0.228959, -2.156343, -0.138593, -0.749696, 0.454034, -1.208326, 0.781049, 1.850878, -2.035055, -2.229368, 1.617962, 1.064388, -0.470157, -2.642538, 1.063012, -1.676346, 0.640605, -0.741748, 0.111837, 1.850261, 0.909178, 0.296234, -0.474660, -1.068781, 10.984878, -2.305124, -4.163750, -2.249644, -0.530302, -0.153850, -1.439902, 1.934667, 1.940011, -1.304234, -2.033562, 0.970429, -1.567272, 0.150162, -1.086878, 1.947379, 1.889387, 0.886676, 0.217362, -0.471853, -1.020441, -1.004290, 2.009722, 1.053132, -1.872133, -2.491603, 2.672606, -0.367110, -0.318741, -3.769757, 1.251371, -1.036518, 0.079666, 1.006413, 0.130778, 2.303427, -0.465912, -1.414802, -0.156666, -1.730807, -0.007416, -0.340561, 0.407324, 1.083252, -0.441983, -0.073141, -0.873232, -0.551301, 0.586208, -0.683542, -0.972109, -0.738272, -1.419129, -0.784290, -1.908111, 1.048776, 1.112361, 1.486281, 0.111446, 1.104565, -0.807457, 2.507578, -1.981948, 1.769490, -2.790846, -0.760780, -0.691268, -2.152983, -2.790889, 0.395263, -0.895858, 0.498073, 0.401830, 0.704802, -1.952522, 1.557720, 0.809939, -1.992283, 0.627598, -0.369150, 0.148913, -0.515960, 0.805138, -3.113520, -1.499959, 1.451287, 0.783183, 1.751803, -1.487845, -3.151612, -0.932889, 0.726342, -0.188820, -1.592091, 0.559781, -0.627185, -0.400851, 1.804299, 2.004846, 0.692698, 4.681093, -2.665981, 0.445699, 8.054358, 2.640569, -0.900600, -0.463237, -1.379600, -0.612482, 1.097925, 1.127894, 2.100681, 1.524652, 1.551129, 0.966367, -0.950918, 0.893282, 0.698710, 0.359133, 0.211088, -0.471570, 1.671166, 1.074640, -2.069757, 1.551382, -0.398993, -1.369890, -0.042689, -0.711931, -1.962018, -1.024064, -0.372630, 0.417639, 0.132114, 1.045222, 0.840674, -0.908199, 0.391756, 2.193799, 0.282602, 3.832250, -1.424545, 1.820177, -1.719480, 1.646421, 0.961916, 0.217981, 0.255106, 1.913936, 0.190913, -1.364432, 1.199430, 2.213551, -1.193318, -0.664005, -0.420860, 1.669962, 1.418524, -0.394983, -1.861321, 1.488656, -1.248461, -0.576054, 0.635768, -0.623423, -0.997677, -1.254219, 0.986769, 3.248942, -0.955006, 0.854402, 2.133770, -0.320522, -1.354895, 0.454979, -0.012248, 0.552766, 1.914990, 1.185136, 2.165647, 0.199811, -0.308959, -0.028716, 1.249419, -1.675196, -1.521112, -0.339116, 1.913370, -0.689238, 2.666677, -2.409486, -2.133920, 3.054758, 1.964703, -0.315777, 0.363499, 0.573369, 2.127483, 4.207703, 0.016491, -1.383848, -0.984142, -1.302895, 0.014881, 1.543277, -0.704435, 1.940655, -3.410389, 0.076540, -1.549596, 2.088219, 1.593110, -1.010924, 0.459974, -0.171825, -0.381877, 0.606960, 2.214293, 1.074450, -0.551852, 0.740115, -2.793068, 0.969792, 0.736658, -2.834861, 0.135510, 1.133700, -0.046007, -0.757080, -1.968265, -1.003406, 1.827479, 1.787595, -0.795879, 0.652203, 0.912435, 0.781862, 1.392779, 1.042729, -0.594439, 1.543801, 0.719703, 1.755961, 0.769557, 1.011888, 1.827554, 1.321115, 
-0.699680, -0.805209, 1.331005, 1.346431, 0.137598, 2.289003, 2.015941, -1.329036, -0.920616, -1.236863, -2.428651, -0.045954, -2.082391, -1.175521, -0.129158, 3.060616, -1.286548, 0.604616, -1.110072, 3.035698, -0.322158, -0.781870, 0.110167, 3.495563, 3.330736, 1.314148, -0.113278, 1.077862, -0.976463, -0.785742, 1.020356, -0.517181, -2.684402, -1.044523, 0.541801, -2.004713, 2.293418, -1.004760, -0.700835, 1.549735, 3.455583, 0.475930, -1.473817, 5.537278, 0.213822, -1.876355, 0.905125, 1.255895, -2.484677, 1.040425, -0.040539, 2.137502, -3.163284, -1.409379, 0.256282, 2.132105, 3.210793, -1.145492, -0.661010, -0.634037, -0.233181, 0.770570, 1.238388, 1.671249, -2.510621, -1.484118, -1.273155, -0.988314, -0.242384, 0.810030, -0.151580, -1.895777, 0.313042, 1.864123, 1.276387, -1.818408, -1.310768, 0.070033, 2.705100, 0.858920, 0.825496, -3.990004, 0.593197, -2.876160, 1.959472, -1.371782, 0.832580, 1.282863, -0.808298, -1.550045, -0.889529, -0.778651, -0.024775, -0.583029, -1.319810, -1.694946, 0.992727, 1.765150, 0.382885, -0.933830, 0.787292, -1.936184, -0.335143, 1.341774, 2.008909, 1.092610, 3.670715, -2.714478, 0.282289, -1.960100, -1.777495, 0.233954, -0.183462, -3.187413, -3.235547, -0.510362, -0.675362, 2.505375, 0.799373, -1.800931, -0.038045, 2.503350, 0.384175, -0.722759, 2.429610, -2.018362, 0.962014, 0.837315, -0.957888, 0.412519, -2.701824, 1.410287, -1.368492, 1.301900, 0.818753, 0.811378, -1.196061, 2.048241, 1.041366, -1.547336, -2.016230, 2.380980, 2.117204, -0.677142, 0.057818, 0.756670, -2.078185, -0.890906, 2.161394, 0.247280, 1.819405, -0.063750, 1.226707, 0.709927, 0.356287, 1.686424, -2.409448, 2.856144, 0.773759, 0.808617, 0.047684, 1.338031, -0.535488, -0.595152, 0.761812, -0.957863, -0.864500, 0.314729, -0.487064, -0.201846, -1.174313, -0.291597, 0.292136, 0.236110, -1.395016, 1.495998, 2.849277, -1.691042, 1.584919, 0.613885, -0.725392, -1.718326, -0.025391, -0.855043, 0.526444, 2.155432, 1.081418, 0.551306, -0.672045, 0.043811, -3.512949, 0.118685, -2.592598, -1.259155, 8.140533, 4.402100, 1.710823, 2.198954, 1.540033, -0.867360, -1.285532, 1.220744, 1.402004, -1.458586, -0.967569, -0.511343, -1.010175, 0.901374, -1.329423, -0.092832, 1.533759, -0.573599, 0.414767, -1.191547, 1.919353, 1.747830, 1.063049, -1.393383, 1.878260, -1.874399, 1.818325, -2.460202, -0.242430, 0.140436, -1.051427, -3.195519, 1.888365, 0.435774, 0.737258, 0.276280, -2.694203, -0.301707, -2.604361, 1.532419, 1.518063, -0.370006, -1.342802, -0.244225, -0.002818, 3.910189, -0.583169, 1.374988, 2.067113, -0.452302, 0.077718, 1.090760, 1.546354, -0.520948, -1.686626, -0.994464, -0.251366, 0.434730, -0.858291, -2.092327, 0.750991, 0.926950, 1.421602, -1.898955, -0.393026, -0.265128, 1.158799, -2.103380, 2.672633, 1.070735, -2.140461, 0.094960, -0.050966, 0.937487, -1.677205, -0.379211, -1.080526, 0.138581, -0.393641, 0.579808, 2.001253, 3.651859, 1.969691, 0.106348, -0.583629, -2.020965, 0.364155, -1.018937, -0.334326, 0.100708, 2.472999, 1.387101, -1.237134, 3.721129, 1.519421, 0.288411, -3.984812, -0.458773, 1.429405, -2.381791, 0.315217, 1.841713, 1.592145, -0.731089, -1.414775, -1.360183, 1.536013, -2.214556, -0.113089, -2.205454, -2.285627, 1.277101, 0.387191, 0.300088, 2.730620, -0.324076, 1.435243, -4.239858, -0.348253, 1.104643, -0.157990, -1.573482, 1.888076, -0.055906, 0.084812, 3.412147, -1.579177, 0.927390, 2.076808, -2.592346, -2.119700, -0.426326, -2.111193, -1.366671, 1.894348, 3.043322, 0.314089, 0.855666, -0.823261, 2.400388, 3.133502, -0.007147, 0.929144, -0.959069, 
2.531155, 2.545533, 0.651116, 1.824821, -0.305034, 2.389764, -1.887502, -0.342360, -2.491367, 0.105732, -2.119360, 2.546562, -0.861071, 0.350706, 0.546464, -1.499462, 0.417675, -1.568473, 0.692838, 2.891876, -0.138319, -1.334901, -0.685667, 2.280439, -3.468426, 1.245887, 3.320926, 0.961338, 0.064542, 0.788105, 0.255912, -1.706218, -0.012525, 1.368832, -1.081255, -0.986172, -1.213450, -3.795788, 0.981057, -2.302466, -0.479100, -1.789316, -0.336261, 0.745378, -3.662589, 1.394853, 1.858585, 0.538975, -2.899513, 1.675120, 1.649777, 1.056105, 0.253166, 1.032704, -3.451596, -0.954366, -1.085564, -1.470250, -0.455586, -1.062200, -0.381748, 0.024468, 0.769484, -1.769355, 0.767472, -1.093527, -0.806618, -1.543110, -0.067089, 0.991702, 0.546402, 1.282086, 2.375943, 0.573987, 0.246033, 1.475257, 0.881634, 0.960805, 2.125304, -2.657653, 0.495612, -3.120179, 1.015683, 2.455024, 0.125900, 0.913431, -2.534758, 2.095575, -0.868909, -0.881125, 2.216185, -0.330804, 1.522615, -0.883653, 1.006859, 0.971254, 1.638694, 0.220840, 2.122283, -0.392955, -1.281651, -1.033705, -0.103710, -1.495686, 1.121236, 1.653453, -0.438871, 1.789696, 1.247634, 2.360607, -1.315232, 0.561946, 2.374259, -3.479356, -0.540455, -0.840258, 1.715602, -0.234809, -1.251655, -1.898509, 0.255810, 0.593610, -0.073663, 0.538080, 1.206753, -3.032221, 0.840534, 0.575739, 0.430918, -1.668601, -0.021352, 1.749554, 0.093953, -0.793195, -0.089617, -0.688069, 2.306179, 0.025835, -1.011802, 0.501585, -0.684657, -0.254115, -0.464431, 0.515950, 1.056903, -2.669991, -0.666975, -0.128514, 1.223944, 0.527231, -0.195675, -0.755060, -0.190950, 0.396863, 3.357061, 0.951689, -0.078219, -1.589841, -0.261371, 1.059966, 2.341810, -1.904779, -0.672253, -1.052846, 1.018597, 0.426214, -0.437104, 0.219385, 0.845618, -0.369086, 2.143687, -1.378152, -0.174391, 1.404780, 0.235700, -2.630498, -0.155800, 0.440964, -0.581919, -0.554132, 1.995181, -3.085170, -1.265410, 2.949584, -1.820384, 0.762439, 0.989417, 0.843609, 0.334507, 1.677501, -1.313708, -1.523981, 1.961761, -0.529740, -1.010412, 1.014432, -0.229633, 2.283781, -1.050624, -0.105361, -1.455855, -1.433940, -1.100140, -0.032599, -2.266694, 6.980674, 0.656378, 0.734336, -0.126044, 0.430033, 3.097827, 2.321077, 0.226326, -1.319470, -0.127232, 0.093848, -1.422338, -0.954043, 1.363927, -1.915344, 3.112850, 1.503816, 3.074722, 1.388993, 1.862996, -1.237698, -0.221567, 0.368206, -0.434996, -1.519875, -0.307488, -0.340854, 0.959727, -1.168284, -2.045792, 0.590173, 1.913429, 0.157029, 2.070282, 1.542187, -0.093681, -1.133967, -4.985939, 2.223901, 0.215805, -0.791171, 0.177974, -0.156168, -0.152497, -3.195990, 1.625665, 0.261392, 0.373906, 3.411873, 0.168360, -0.363593, -1.106421, 1.543631, 1.072047, 1.209616, -2.594547, 3.677001, -0.704093, -0.769831, 0.156167, -0.087139, 0.926822, -1.497946, 3.488669, -0.498456, 2.585355, 0.638786, -1.265893, -1.515186, -0.170610, 0.865639, 0.803070, -0.554210, 1.500199, 0.309627, 1.830099, 1.093678, 0.254188, -1.840083, -0.918117, -0.036964, 0.289066, -1.471333, 1.905748, -0.871947, -1.397716, 0.605884, -0.819838, 0.557486, 5.255325, -0.804167, -3.643383, 1.296990, -1.812794, 0.670182, 0.265029, -0.622519, -0.463429, -0.722114, 2.248491, -1.450041, -3.686104, 1.488080, -0.845360, 1.296761, 2.761697, 1.610896, -0.908817, -0.892923, 0.403756, -0.817155, 0.639292, -0.273899, 0.300914, 1.317295, 1.791245, -0.553654, -0.858094, 0.485510, 2.715864, -1.518985, 1.013328, 1.216428, 0.444058, -0.232796, -0.925205, -1.291752, 0.640105, 0.620285, -0.529986, -0.973146, -0.448751, 2.131908, 0.735240, 
-1.234902, 0.726922, -1.787336, 1.955731, 0.182495, 1.072748, 1.496628, 0.862454, 0.281887, -1.703280, 1.182623, 1.009730, 0.203992, -0.383488, -1.241500, 0.157968, -0.299652, -3.449785, -0.833084, -1.119349, -1.600843, 1.947359, -0.422477, 1.036511, -0.660727, 1.346081, 2.259709, 0.005040, 0.475642, -1.003685, -0.806614, -2.083865, 0.251728, 1.807141, -1.653711, -0.165789, -1.348610, 3.222886, -0.930775, -0.260357, -1.350029, -1.151363, 0.002781, 1.085186, 2.021380, 3.255248, 2.172967, 0.429837, 0.142068, 0.517678, -0.855089, -0.015564, -0.612736, 0.428050, -0.434883, 2.011954, -3.612378, 1.087736, 1.699983, 2.713425, 0.922748, 0.093344, -1.775274, 1.903097, -0.538384, 2.335316, -0.430231, 1.502170, 0.710999, 1.891477, 1.617951, -1.193772, -0.091001, 1.150020, 1.137367, 1.016144, -0.704379, -0.729804, -4.031040, -1.497000, -0.716411, 0.565852, -0.817705, -1.236752, -0.003742, 0.082370, -1.786841, 1.358746, -1.566733, 0.149939, -0.566612, 2.939605, 1.724788, -0.867142, -1.651294, 1.588408, 0.252766, 1.094429, 0.123979, -1.050261, 0.554460, -1.169814, -1.511499, -0.590811, -2.767133, 0.227685, 2.132695, -1.274353, 2.810912, 0.161260, 2.231755, -0.657446, -1.932288, 0.575805, -2.594836, 0.439481, 1.191852, 1.916686, -3.117297, -0.135380, 1.094884, -1.781616, 0.894625, -2.073685, 0.569600, -3.029050, 0.454545, -1.965721, -0.043368, 1.174816, -4.324808, -1.971886, -0.892658, 0.274854, 1.206510, -0.470141, 0.381961, -2.620223, 0.854657, 0.446380, -0.212430, 1.419069, -0.547126, 0.105516, -1.209780, -2.682103, -1.880812, -1.429287, 0.499100, -0.314258, -0.705809, -2.116629, -0.565486, -1.117403, 1.373470, 0.272802, -1.326758, 1.485354, 1.984464, -1.024545, 0.423898, -1.764647, 0.073051, -0.039845, 0.183949, -0.371510, -2.058260, -2.036934, 2.249296, -0.461656, -0.099479, -3.824854, 0.369886, -2.602445, 0.992267, 0.074909, 2.877812, 1.390216, -0.761985, 0.766279, -2.096076, -0.395331, 1.374215, 0.465398, -2.055075, 0.898630, -1.237133, 0.962724, -0.809912, -1.234195, 1.781487, 1.675791, 0.494026, -0.095817, 3.185573, -0.966046, -0.111969, 0.928911, -1.077095, -1.845717, 1.504816, -0.035544, 2.500948, 2.056350, 0.379942, -0.206586, -2.846630, 1.946602, -1.338225, 2.343692, -1.819101, 1.845730, -0.347326, 1.954134, 0.912669, 2.298723, -0.607779, 1.672875, 1.409226, -0.789262, 1.448115, 1.299492, 0.116671, 2.487772, -1.355910, -2.678572, -1.558875, 0.580180, 0.590901, 1.325215, -0.057967, -0.311906, -2.353572, 2.648535, 0.847283, 1.431074, 0.008418, -0.167587, -1.298527, 1.413521, 0.183468, 0.588779, -1.778174, 1.798621, 2.213693, -1.839578, -0.902392, -0.295681, -0.338337, -0.407854, -0.662204, 1.821490, -0.994281, 0.572172, -3.238699, 2.376113, -2.768938, 1.119103, 2.424388, -1.931760, -1.624551, 1.031867, -1.209697, 0.135001, 0.003992, -0.016782, -3.550475, -0.853445, 3.142736, 1.401256, 0.931407, -0.329627, -1.389046, 0.637283, -2.099255, -10.023037, 1.456096, -0.970827, -1.056861, 4.013074, 0.707597, -0.933795, 0.389084, 1.395731, 1.056435, -0.340454, -0.859893, 5.982233, -2.853151, -0.138226, -1.655911, 1.634749, 1.421054, 0.831926, 0.126816, -1.328772, 0.069112, 0.528022, 0.414295, -0.416438, -1.513069, 0.284723, 1.566565, -1.183272, -1.399087, -2.383024, -0.178824, -1.625272, 1.678425, -0.720101, -0.558266, 3.070326, -0.131884, 1.884880, -1.760699, -0.805406, -1.572882, 0.167965, 0.924478, -2.315411, -3.594679, -0.191877, 0.584720, 1.051175, -1.576983, -0.080467, -0.021043, 2.254590, 1.110942, -0.666646, -2.083611, 0.514277, 0.829726, 0.224291, 1.543836, 1.234613, 0.376972, 1.818251, 
-0.209732, -0.566087, 1.624159, 3.366616, -0.206569, 1.980258, 1.895052, 1.364278, 0.919897, 0.264587, 1.088133, 0.582130, -1.190331, -1.655422, -0.907104, 0.023765, 0.100906, 1.119427, 2.556829, 0.162990, 1.239510, -2.127181, -0.617491, -2.381191, -1.019343, 3.000513, 1.816126, -1.614661, -0.050322, -0.523640, -0.337145, -1.060617, 0.882646, -2.076514, 1.735131, -1.757560, -0.276296, 0.333477, 0.596730, 0.271413, -0.522215, 0.114551, -1.091682, 0.329627, 0.378582, 0.914257, -0.586515, 0.579029, -1.491025, -1.264606, -0.445841, 0.448031, 0.496971, -0.880069, -0.114983, -0.221523, 0.487215, 0.714126, -0.402273, 0.408664, 1.147038, -0.760143, -1.694010, -0.081308, -0.435332, -1.635392, 0.896697, 2.169811, 2.090110, -0.813811, -0.952351, -3.554594, 0.585532, 0.306426, -2.123318, 1.154331, -0.666047, 2.382697, 0.883779, -2.237601, -1.200651, 0.739852, 0.376667, 0.110609, -1.139884, -0.008385, 0.512189, 0.339850, 0.762335, -1.133032, -1.156095, 1.560428, 1.599772, 2.613970, -0.694430, -0.603383, 1.225852, 0.804845, -3.920221, -0.558424, 1.343242, -1.414377, 3.862548, -0.190104, 0.504851, -2.784963, -0.546281, 1.307294, -1.283382, 1.889746, -1.210009, -1.020280, 0.245678, 1.187263, 1.127843, 1.782872, -0.792363, -1.004296, -1.971464, -1.409945, 0.637755, -2.414992, -1.974596, -1.783538, -0.581649, -3.213634, -1.754282, 1.805191, -1.235651, -0.634839, 0.161638, -0.630046, 2.751879, -0.346227, 1.534349, -1.358303, -2.453912, -0.716559, -1.780014, -0.012675, -1.166174, 1.113524, -2.457147, 0.085220, 0.456114, -0.555790, -3.015698, -1.995655, -0.416727, -0.781068, -0.272143, 1.189361, 0.488557, 0.088896, 1.462901, -1.933535, 2.070531, -2.085645, -0.445012, 0.790365, -2.194353, 0.478096, -2.488342, 0.780072, 1.310410, 0.764107, 1.467836, 0.061706, -1.439014, 1.356705, -1.356657, -2.498615, -0.836300, 2.444328, 1.076395, 0.076544, -0.074112, -2.894312, -1.433758, -1.842554, 1.837269, -0.219312, 1.458094, -1.320004, 0.663539, -0.989509, -0.992193, -0.552759, -2.793687, 0.661098, 0.456114, -3.696326, -0.845517, 0.579287, 0.631815, -0.721260, -0.693439, -1.766250, 0.179440, 0.045727, -1.527113, 1.598382, -0.579325, -1.706727, -1.797411, 1.867648, 0.261699, -0.328072, -0.635934, 0.433348, -0.163198, 0.862483, 1.879573, -0.044012, -1.600434, 0.239456, -0.764510, 0.363217, -0.055961, 1.180820, -0.187238, -1.391579, -1.321477, 0.524783, -0.842347, 0.494022, 1.234072, 0.888916, -0.213973, 2.283071, -0.640313, -0.366253, 2.689683, 0.974300, 1.940052, -0.781908, 0.498088, 0.626403, -2.054290, 0.619295, -1.442814, 0.339450, -0.896698, 0.819171, -1.252571, 1.571107, 0.813266, -0.024424, -1.298713, -0.318076, 1.542235, -0.038043, -0.141969, -2.487703, -1.355928, -0.945512, 2.720232, 0.380313, 1.114524, 0.202963, -0.939223, -2.362461, 1.172898, 0.703404, -2.639584, 1.718914, 1.316741, 1.989495, -0.500795, -0.263018, 0.600646, 0.534427, 0.872801, 0.952260, -0.956119, -0.038358, 3.718248, 0.846966, -0.646319, -1.449061, 1.844202, 1.019944, -1.184215, 0.492499, 1.330654, 0.425558, -0.577335, -0.761218, -2.921366, -0.649320, -0.737245, 0.773534, 0.505134, 0.850217, -0.231921, -1.084704, 0.951773, -0.947775, 2.458345, 0.228633, 0.637817, -0.918365, 2.076945, -0.997954, -0.818052, -2.483827, -0.715143, 1.765916, 0.444802, -0.832161, 0.735556, -0.761760, -1.906592, -0.684428, -2.505757, -0.460853, 4.265587, 1.137864, 0.901250, 2.105058, -0.177623, 0.255737, -2.108447, -0.867666, -0.941735, 0.019559, -0.750972, -1.863474, 0.763017, 1.300454, -0.693253, 0.871873, -1.323827, 2.125471, 0.757597, 1.097759, -1.434198, 
-0.303980, -2.549546, 0.835890, 0.303656, 2.135996, -0.941849, -0.846840, -1.505864, -0.575415, 0.283727, -0.269862, 0.042036, 0.818713, -0.595052, -0.639762, 0.008008, 0.710045, 0.399087, -1.756857, -1.380597, -1.379578, 0.839164, -1.650149, 0.144494, -0.460965, 1.593046, -1.647523, 2.081554, 0.554536, -1.200486, 1.084391, -0.221848],
+	"command-r:latest":     [3.343781, -0.275814, 0.454973, 2.667439, 1.139656, -3.217637, 4.925746, -3.962457, 0.980116, -4.160393, -0.081422, -0.652773, -8.862714, 1.343202, -5.292061, 1.600222, 1.756423, -1.142717, -2.589377, -1.096659, -3.947407, -0.648118, 0.817330, 1.249553, 0.793624, -2.220591, -0.204749, 1.277867, 0.021358, 1.544170, 0.013234, 0.185021, 4.115247, -9.109859, 3.825158, -1.512020, 6.406564, -0.101245, 1.215347, 9.500904, -2.644686, -1.778385, 2.493311, 3.694325, 2.697178, -2.767993, -0.238474, 0.226412, -2.744470, -8.258098, -1.947246, -0.046177, 0.038588, -1.605423, -3.539018, 3.948240, -0.214077, 4.202616, 2.432104, 1.813233, 2.096451, 2.935130, 0.656194, 5.365718, -5.580070, -2.143265, 1.548568, 5.144696, 2.590589, 1.195513, -0.704586, -0.201354, 5.630354, 5.713121, -0.529943, 3.442473, -6.183206, 4.220657, -7.694033, 0.866504, -1.198718, -2.125260, 3.210089, -0.438421, -1.928844, -2.251470, -0.333551, 0.184342, -7.158889, 2.333304, 5.827986, -1.217000, -1.540859, -1.085593, 4.221012, -3.591006, -0.941764, -4.606610, -6.769041, -3.331358, -5.846883, 4.248871, -1.351012, 1.112255, 4.982079, -0.553816, -8.118864, -7.048461, -0.545580, 9.831732, 1.885043, -6.806152, -3.613053, 2.099929, -4.145867, 1.162989, 1.121790, -2.270707, -8.104484, -6.115410, -2.037424, -7.011181, 0.001888, -1.288358, 3.032661, 0.126157, 1.127175, -6.765113, -3.277233, 7.282475, 0.761192, 3.777529, 1.852839, 2.810993, -0.631536, -4.668180, 1.938219, 2.619288, 0.619725, 2.242327, 5.060790, -2.529539, -0.770671, -4.668029, -4.334256, -9.579988, -5.887598, -3.204403, 0.897181, 1.638821, -3.258848, 0.508208, -1.711256, 0.853810, 1.074834, 3.388533, -4.291458, 0.995875, 2.646734, -5.680473, -0.288449, -6.776384, -9.162459, -1.270542, 0.827329, 2.764098, -1.154817, -0.195002, 6.544490, -3.651612, -1.612096, 6.639533, 1.412141, 6.283203, 0.454502, -8.110965, -6.140530, 4.895475, -0.497331, 0.110950, -0.089417, -6.977638, -2.880804, -3.110812, 4.872985, 1.500566, -7.171577, -4.647212, 7.078329, 1.592515, 2.269818, -18.472763, 0.006562, -1.392788, -2.109365, -5.681810, -3.214898, 0.137075, 0.587586, 2.040688, 1.737877, 4.950655, -0.858913, 6.382308, -3.120993, 2.484262, -8.143215, 2.378926, -5.380285, -1.986916, -1.269735, -1.843263, 3.896823, -0.723419, -0.157345, -0.013623, -2.125233, 3.241222, -1.010446, -3.961627, -1.199333, 0.755091, -2.785051, 1.401513, -4.724794, -1.633887, 1.459495, -0.033115, -6.036390, 4.669405, -4.966288, 1.337360, -1.756613, -5.167958, -2.344655, 1.396638, 0.532748, 2.133362, -0.396743, 11.526321, -4.582005, 0.181447, -4.303258, 0.268173, 1.737863, -2.500399, 2.446739, 3.530474, 5.115706, -0.140839, 1.108029, 0.909780, -2.008389, -5.470373, 0.338981, 0.940498, 0.570977, 1.607189, -0.582565, 9.699156, -1.516018, -4.180067, 0.006470, -1.705021, -0.201452, 1.868154, 5.046639, -0.543539, 2.834447, 0.851093, -0.483138, -0.673084, -0.134258, 1.958529, 2.368384, 4.490047, -0.262858, -0.578660, -0.738702, -2.535524, -3.219797, -0.901652, -0.634149, 2.746953, 2.354932, -2.567454, 0.257080, 0.781687, -0.264464, -2.603856, -4.045373, -1.040004, -3.437232, -2.968499, -2.634792, -0.498064, 4.383482, 1.315463, 1.387401, 1.231248, -1.059394, 2.242662, -0.016071, 1.068977, -2.130052, 0.015135, 3.459895, 5.286757, 1.234284, 0.498344, -7.126027, -6.524753, 1.711594, -3.448725, 0.484982, -2.226021, -1.551042, -0.363871, -4.593443, 5.032470, 0.229082, 0.945881, 0.547474, 3.921237, 1.543457, -0.297181, -0.133541, 4.762614, -14.255990, 3.052345, 2.819587, -1.130518, -0.061253, -10.462576, 
-0.012670, 1.216432, -4.723098, -8.699916, -1.943423, -7.672225, -3.830274, 6.106357, -0.417323, 4.071136, 1.773671, -2.477784, 2.864450, -6.309965, -6.137336, -0.701815, -0.103799, -2.204011, -1.030298, -5.722357, -8.312103, 1.610195, 5.209179, 3.954740, -0.303180, 0.789238, 0.953849, -1.653946, -0.028072, 0.702633, 2.581787, -6.662658, -0.629628, -3.159878, -1.503025, -0.126721, 4.615642, 3.865401, 2.765733, -0.818197, 6.066393, 2.374908, 6.187591, 6.537537, -0.019076, 3.288919, -5.977790, 2.153242, 0.991696, -0.039763, -11.709957, -0.893349, -1.118323, 3.427664, -1.120850, 0.135702, 1.743049, -1.238715, 2.078863, -2.075931, -0.037239, -0.564981, -1.293588, 1.072095, -0.338301, -2.457089, 0.007143, 8.301857, -1.482941, -0.036358, 5.913482, -8.312625, 0.060764, -0.279726, 3.240084, -6.741280, -3.533299, 0.940669, -0.835060, 0.515219, -3.326610, 1.702149, 2.362168, -3.379984, 3.258184, -3.359754, 0.055251, -0.165966, 0.051318, -3.324284, 1.929422, -0.247458, -1.470549, -0.333021, -2.628627, -1.262493, -3.525706, -0.617524, -4.303742, -5.836284, 1.296722, 0.014102, -1.484960, -4.936589, -0.689694, 0.321401, 2.738512, -1.299144, 6.999551, 3.567674, -5.994262, 2.176221, -0.711455, 4.148783, 0.060613, -1.590490, -7.158051, -6.485102, -4.934273, 0.439335, 2.992083, 3.171849, -2.529648, -1.449935, -4.031073, 3.411590, -4.730103, 0.968958, -2.242847, -0.205325, -1.519718, -4.031760, -4.223692, 2.338820, -6.251447, -2.773652, 0.104261, -4.704476, 2.292274, 0.262622, 4.491196, 4.754832, -2.544113, -4.407736, -5.914155, -0.302099, -6.706584, -4.492346, 1.692832, -9.301826, 9.576107, -0.444878, 0.137933, 1.152962, -4.318294, -0.968798, 2.868977, -2.964417, -3.277911, -5.268424, -4.372011, 1.424269, 0.014607, 3.218485, -1.883414, 1.762284, -0.289330, -5.437626, -7.087764, -0.159299, -1.004035, -5.852360, -5.730863, 1.219243, 1.629844, 3.632569, -0.337558, 1.670067, -1.428962, -1.175744, -1.142460, 0.374826, 6.155413, 1.335459, 1.716548, 11.388607, 2.994390, 3.725840, 0.906250, 0.117606, -1.771317, 0.330543, 0.424982, -0.582504, 2.455512, -1.767787, 0.228882, -0.110461, -0.961098, 1.787151, 1.718345, -1.663210, 3.544751, 1.349301, 0.522274, 0.339371, -7.864332, 2.604174, -8.005915, -0.673629, 2.810067, -0.181011, -0.017242, 0.579071, 2.580422, -3.446174, 1.407088, -2.714404, -0.277343, 3.203218, 3.641239, -6.671936, -2.366600, 3.390316, -1.838757, 3.543766, -1.180676, 0.108697, 0.589361, 0.718842, -0.867555, 1.638399, 1.760780, -1.804819, -5.801441, -2.121068, -0.244877, 0.771526, -0.534767, -0.391645, 0.103339, 0.140907, 4.610296, -3.564982, -1.094342, 5.672598, 2.585696, -3.173497, 7.090618, 3.261405, -0.779932, 1.518792, 6.489270, -1.592150, 5.778431, -6.621472, -2.670308, -2.659441, -0.039881, -5.708166, 0.706590, -1.042225, -3.893016, -2.343265, 8.167517, -0.055133, 6.047810, 0.527957, 1.596409, -3.899053, -2.582129, -0.307368, 0.271911, 2.005986, -0.455206, 0.028330, 6.084834, 5.658875, -3.678282, -3.797084, -2.407085, 0.354242, -0.929543, -3.714456, -0.342444, 1.106999, 1.264105, 0.489304, -3.434623, 8.497966, 2.258908, 7.230909, 4.122110, 1.886335, 1.002831, -1.768519, -1.672922, 3.181017, 1.183783, -4.637954, -1.089755, 1.120303, 2.670096, 3.081970, 8.499523, -0.027745, 10.147956, 0.212397, 4.096593, 2.483257, -3.835583, 15.020834, 1.363512, -3.375948, -0.857642, -1.005622, -0.980444, -2.889248, 2.868831, 6.088690, 11.332556, -0.186268, -1.513368, 0.664520, 6.496457, -0.486679, -4.162925, -4.577507, 5.446809, 3.785655, -1.462451, -0.259170, 2.017063, 1.200777, -0.624123, -2.015638, -4.934658, 
-4.058458, 2.973224, 0.663845, 1.939258, -1.204918, -7.879530, -2.132903, -0.208517, -6.144309, 0.932803, -0.732682, 5.596436, -3.285809, -4.797080, -0.504366, 7.276794, 2.753779, -0.387244, 0.156124, 2.562421, -2.016191, 1.556009, 0.002035, 0.732805, -1.960444, 1.017454, 2.803624, 2.804175, -7.543849, 3.584458, -0.438897, -4.197019, 1.522082, -2.848772, -1.030844, -3.298563, 2.560498, -0.370281, 1.889420, 0.653683, -2.452834, 1.302697, 1.754493, -2.797449, -2.347050, -3.446241, -0.561618, 0.518104, -4.443400, 0.354994, 4.314837, 1.230740, 0.180836, -3.967075, -0.616630, 8.771847, 5.424160, -2.589878, -1.055147, 3.911415, -3.147947, -1.738150, 4.177715, 2.263200, 7.337656, 5.079623, 3.970637, 1.654971, -3.482348, 0.155131, -1.497517, 1.612941, 1.220724, 2.503623, 3.974868, 0.483474, -1.500351, -6.754654, -5.902539, -7.135888, -2.654474, -0.885237, -3.519493, -0.410429, -2.094261, -5.384110, -3.102675, -0.368129, 4.658718, 3.311850, -1.213326, 2.049461, -1.131403, -1.790761, 5.456171, -3.043935, 0.403564, 4.599469, -2.942664, -2.578368, -0.262799, 5.510591, -6.334243, 0.221941, 0.429937, -1.894348, -2.425150, -2.950989, 0.503390, -0.003128, -5.272776, -2.789283, -8.323901, -4.397413, 0.398002, 2.068184, 6.274145, -3.236507, 3.467738, 0.717246, 2.089747, 2.085892, -1.736258, 1.681871, 1.285726, -8.973390, -0.860758, 0.800301, -9.213509, -1.338597, -2.311807, -7.707338, 2.199563, 2.703517, 1.228356, -6.321905, 0.357646, -1.393916, 1.186368, -0.121182, 1.894334, 0.927539, -3.147604, -4.501308, -0.665627, -0.584694, 1.772024, 2.854347, 1.103954, 4.297790, 0.173993, -1.237015, -6.702371, 1.891608, 0.050805, 0.050248, -8.095572, 3.396281, 5.648885, 1.406410, 6.755433, -1.110750, 4.455142, -7.690240, -2.226854, 3.563312, 6.901121, 0.162094, -0.199623, 5.733426, -0.758055, 2.148874, -0.558426, -12.193730, -1.131083, -2.411241, -0.533229, 4.095393, 2.774926, -7.490507, -0.954049, 1.211633, 4.258538, 6.615598, 0.228282, -8.589841, 9.013588, 0.131699, 0.252958, 2.398877, 3.763722, -6.616025, 2.063315, -0.566855, -8.167114, 0.405287, 1.341827, 0.368449, -1.448815, 0.594255, -0.730430, -0.006993, -1.534255, -6.820147, 3.726596, -1.632142, 3.891737, -1.045272, 8.378374, -1.703666, -4.323789, 5.497786, -3.642643, -0.610038, -4.677728, -0.039480, 2.422321, 0.480538, 0.263621, -3.451740, -0.050342, -5.111891, -1.567019, 4.639090, -1.861093, 0.356844, 1.961862, -1.136813, 3.777646, -2.516439, -0.426840, -0.027631, -0.619671, 0.184838, 1.873936, 0.210117, 2.006767, 1.152207, -2.746292, -8.414000, -4.273118, 0.035932, -3.006817, 9.130335, 5.099446, -0.303868, 2.515733, -2.010652, -1.781340, -1.401030, 1.134259, 0.776905, -0.239682, 4.008242, -2.262364, 0.238568, -3.877701, -0.768959, -0.175720, 0.579776, -13.733640, 4.120509, 0.110667, 0.992731, -10.299074, 2.716733, 1.749644, -4.024142, -3.242632, -1.746854, -4.300363, 2.520753, 0.000492, -0.808906, 0.629711, 4.841471, 1.479105, 1.531526, -1.473484, 4.734550, -0.248460, 2.025939, -4.844282, 0.051011, 0.177340, 0.452104, -2.922164, -8.545454, -0.198014, -2.130960, -1.880845, -8.083119, 4.007063, 3.393564, -0.629868, 7.486115, -4.880897, 0.372874, -1.883602, -5.199387, 5.950924, -2.148227, 0.461290, 0.596155, -3.361518, 4.626005, -0.612265, 8.614533, -3.533565, 0.574394, 1.162784, -0.689196, 1.775221, 0.645540, -5.026666, -2.923343, -3.537855, 5.279396, 0.316125, -1.655858, -2.230857, -3.831468, -0.993665, 1.073296, -1.158993, 7.971602, -0.750468, 2.175637, -0.746131, -5.430524, 3.333487, 5.677142, -0.082347, -2.380460, -0.045493, 0.198291, -4.133145, 0.328931, 
-5.912801, 1.273532, -5.813002, 1.085318, 1.544092, 5.620995, 0.005771, 4.831237, -2.478376, 6.826854, -7.331986, 6.605777, 2.627247, 2.897933, 7.182114, -2.801941, -0.453709, 1.374557, -5.311098, -2.395463, 1.743300, -0.037800, 5.370473, -0.955576, 3.594490, -1.246708, -2.748146, 0.111598, -4.235927, 2.153038, -4.984576, 8.630300, 2.346491, -1.039889, 0.320968, 2.113551, 8.670997, -1.900339, 1.383541, -2.812557, 4.179167, 6.660100, 1.917526, -0.508494, 1.349263, -6.398208, 2.880494, 0.548544, 1.527744, -0.763936, -7.011359, 6.715817, 4.860614, -2.366866, 2.384355, -0.416671, -3.581128, -0.907592, -0.503966, 0.152084, 2.734455, 1.547329, 2.718583, 2.912473, -2.193865, -4.596483, 2.517669, -7.600883, 2.062043, 1.416978, -0.950477, -3.472302, -0.312917, 1.985105, 4.082620, -0.327396, 0.315942, -0.123894, -1.847559, -1.222050, 0.767697, -0.094673, 5.981305, -10.765258, 4.336037, 2.107987, -6.377103, 1.570145, 0.609587, -2.233925, -0.086135, 0.288981, 0.310782, -0.056502, 0.552609, -6.733884, 2.038222, 1.129642, 3.416910, -4.094422, 2.193018, -3.254340, -5.332660, -0.610538, -2.213975, -0.343964, -0.187319, 3.331956, -0.785005, -7.468774, 3.267446, -1.213103, 2.978286, 8.389747, 2.068025, 5.724949, 0.851759, 5.942196, -2.414881, 0.294918, -5.077850, 5.108668, -3.536914, 2.822123, -0.205441, 3.346289, -5.570610, 0.729711, -2.735071, 1.774440, 3.982238, -0.190283, -0.083524, -1.821509, 3.989396, 0.998905, -0.118806, 0.974467, -0.780703, 4.907207, -1.491556, -2.437642, -0.892695, 4.984404, 2.318693, 3.037425, -1.844034, -6.447559, 2.547562, -2.171201, -3.447818, 3.674742, -4.445561, -0.531791, 0.331127, -3.281976, -4.991351, 4.298832, 4.203872, 1.188574, 3.257704, -2.689115, 3.526842, 0.000659, 1.023238, 1.895292, 0.008100, 0.308081, 0.571350, -5.082287, 0.034021, -3.136824, -9.955914, -8.184452, 1.992786, 4.404663, 0.441305, 0.115716, -1.536674, -1.684408, -4.357782, 10.044423, -0.533050, 4.287567, -0.716581, 6.065951, 7.769640, -0.003222, -0.758130, -0.170485, 4.783528, -4.518212, -2.055881, -0.141658, -0.012537, -0.770738, 9.666203, 4.726260, -3.009871, -3.476159, -0.917047, -6.437633, -1.967011, -3.003409, -2.095386, 3.207532, 3.465004, -0.060364, -0.891863, -3.842914, 3.277990, -2.422456, 3.520365, 9.463841, -5.373384, 0.638642, -0.027223, 0.257485, 3.116745, -2.700408, 0.786203, 7.932927, -0.368764, -0.977473, -0.289243, 5.171692, -0.792496, -3.126659, 0.046145, 3.027295, 0.108762, 1.612523, 1.715320, 0.589681, -2.685192, 4.519046, -0.299773, -1.893413, -7.664293, 3.115619, 1.344446, 0.804518, 1.377422, 6.852480, -0.457485, 1.220877, 2.455439, -2.472999, 1.171533, -9.642070, -4.484784, 1.194985, 7.472065, -0.062002, 0.191905, 3.206121, 7.939042, 5.110579, -2.162325, 13.721641, -0.590828, -4.301183, 2.360873, 1.404462, -0.323969, -6.597520, 4.265462, 3.187601, -0.485109, 1.062674, 0.729921, -7.699634, 0.751458, 4.771646, -3.588130, -2.555057, -5.481992, -1.098283, -0.680966, 0.059207, 0.894726, -12.827731, 0.159850, 5.356339, 3.974762, 9.352292, 2.410261, -6.222835, -0.397519, 3.921666, -8.805064, -6.806501, 6.458934, -0.438970, -0.434269, 2.491107, -4.319752, 9.206378, 1.004822, 1.691844, -1.410262, 3.537293, -0.262292, -2.547208, -6.289035, 1.132843, -5.170214, 10.569111, -0.345776, -1.867645, -0.529611, -5.119642, 2.526826, -0.050514, -5.041572, 1.427129, 4.478755, 0.650192, -5.964411, -2.837880, 0.782313, 3.174311, 2.358122, -4.875733, 2.268328, -2.799593, -0.257828, -0.662918, 6.229756, -3.342180, -2.568766, -5.329382, -5.410961, 0.016886, -1.270424, -1.469224, -0.327146, -5.714297, 
1.815491, 0.257221, -0.161375, 2.917636, 3.124429, -4.969270, -1.277912, -0.364768, 2.295934, 0.944646, -2.181587, 3.034961, 2.136451, 1.314268, -2.421805, -3.513581, 0.771600, 4.132788, -6.538311, -0.875464, 1.799943, -0.094945, -4.288071, -0.354705, 0.340733, -0.129236, -0.986896, -1.609142, -5.557634, 3.606832, -3.678227, 3.474812, -1.406148, 0.641834, 0.617001, 6.203650, -1.656687, -1.138457, 1.656011, 5.493791, 0.491741, 0.147250, 7.959695, 1.408193, 1.366536, 8.475793, -0.525615, 0.936636, -7.333754, 1.445627, 1.028849, -0.763335, 4.218029, -0.186917, 1.564881, -5.419790, -1.104242, 2.580936, 0.830451, 1.480569, -2.745472, -2.672715, -0.054050, 0.066279, -2.328207, 0.027136, -1.318324, -0.121855, 3.948206, -0.020364, 0.408698, -2.190275, -0.413554, -0.104187, 2.836448, 1.717294, -1.476446, 1.163178, 0.782593, -3.341428, -0.347680, 3.849605, 3.456213, -0.028440, 11.302976, 0.245774, 3.730564, 3.002578, -2.414288, 0.537928, -10.043763, 4.635122, 0.548230, -1.587269, -9.622440, 0.073291, 0.143492, -0.004344, 0.004642, 1.219415, 0.379508, 3.309238, -0.920879, -5.166202, -0.668245, -1.570390, 1.234253, 6.536276, 3.304239, -2.533845, 2.828630, -0.233467, -2.732973, 0.512803, -1.988147, -1.264982, 1.260414, 5.310548, -0.129676, 0.327829, -0.676909, -1.185600, -6.314182, 0.689383, -0.927936, -2.516296, 2.868223, 7.618333, -0.548516, 4.363874, -1.833384, -3.474023, 1.232027, -0.333526, 9.384752, -5.510630, 1.791963, 2.661515, 0.026483, 0.651698, 5.045140, 0.528704, 4.777268, 0.006308, 0.517817, -3.746511, 0.010729, -0.695228, 3.668597, 0.388150, 0.376637, -5.316834, -0.268401, -0.422471, -0.961711, -0.544995, -2.849309, -4.574137, -7.589499, -0.718866, 4.121716, 1.052236, 3.438159, 4.391444, 0.770096, 4.246434, 13.796817, -2.504740, -0.344975, -0.852745, -0.293723, 4.262160, 1.099506, 5.458565, 0.101993, 1.671578, 2.593444, -0.400709, -0.012712, -1.531975, 0.338049, -2.061397, -3.908940, 1.543308, 0.821022, -0.568642, -0.106388, -0.334749, -4.636153, 2.893635, -1.995710, -2.209117, -2.870719, 1.848953, 0.295602, -1.784778, -3.630996, 6.124650, -1.008684, -0.401929, 2.529864, -1.249757, 11.899358, 4.583395, 3.639607, -3.379079, -2.726734, -0.000634, 4.743830, -1.845286, 0.685491, 1.206432, -2.532401, -0.202496, -0.672539, -0.544298, -1.160769, 0.410462, 0.072620, -0.135203, 4.014706, -4.676506, 0.313707, 1.674464, 8.058844, -1.526760, 0.048420, 2.014790, 3.248549, 2.800738, 1.409468, 0.034131, -2.217367, 2.929803, -2.377814, 3.931602, -2.503181, -6.191005, -1.961296, 2.107366, 2.191592, 6.360295, -2.241297, -0.701410, -0.387093, -0.676544, 2.977399, 3.597678, -1.676969, 1.508200, 0.274561, 0.739619, -0.258541, 7.250594, -7.482875, 4.881182, -1.705662, 0.727951, 2.268465, -6.659826, 4.396453, -0.209851, 2.595658, 0.217294, 0.431553, -0.273800, 2.275743, 1.172862, 0.313280, 5.116271, 2.346627, 0.687950, -8.359097, -2.886562, -4.881249, -2.611210, 0.106734, -0.001841, -6.674205, 2.040544, 4.276235, 0.080222, 7.742506, -1.454112, 3.055201, -4.890799, 0.303546, -0.410229, -0.000000, 4.995249, -3.901189, -1.693798, 3.009526, -4.520331, 9.146361, 7.161877, 0.521853, -0.497622, 0.523855, 3.208404, -0.037914, 2.241517, 5.048973, -0.994681, -1.478314, 2.520435, -5.391829, 2.122844, 0.467170, -0.279453, 0.429281, -10.716861, 0.807114, -7.626911, -2.286292, -0.447248, -4.595525, -8.309303, 0.176255, 5.901590, -0.003131, -3.732668, 0.566506, 0.922014, -1.193130, -11.262701, 1.786454, 0.211564, -5.432101, -1.015164, -0.813636, -0.253057, 0.656902, -2.896779, 4.537857, -0.881212, -6.356174, -4.630372, 
0.838714, -3.151064, 0.053268, -6.840989, 0.107878, -5.551770, 0.350055, 0.290700, 3.076490, -1.742435, -0.145971, 6.257341, -0.253585, -1.799793, 4.669433, 0.160630, 1.402631, -3.963378, 3.076532, -0.245547, 5.281942, 0.916202, 3.413607, -1.101269, -4.640521, -9.491997, 1.460039, 5.395495, -6.048810, 0.067125, -2.854902, -7.425605, 0.870857, 2.436175, -1.258716, 6.681839, -1.167981, 4.587613, 1.842767, -0.458165, -0.153182, 4.159862, 1.946693, -6.237222, -1.203096, -0.167390, -1.698844, -1.228860, 0.059560, -4.032769, -3.855078, -0.000377, 0.573616, 5.128727, -3.830783, -0.605543, -4.437639, -0.569940, 0.419119, 1.016710, -2.440497, -3.871237, 5.401674, 3.643533, -3.189250, -2.823871, -7.680893, -0.526801, -2.247987, 2.289295, 2.900466, 2.088039, -0.046243, -2.374596, 5.195171, 16.277483, -3.109166, -1.607331, 2.356828, 0.746973, 2.786855, -1.505428, 0.010781, 3.406119, -1.512498, -1.009099, 2.800846, -11.539624, -2.626078, -0.032230, -2.579077, -8.728624, -1.635912, 5.189236, -1.741071, -6.841572, -0.953889, -1.987891, -2.173831, 2.370864, 2.175112, -2.144978, 1.270610, 1.158945, 2.743232, -0.320985, -3.099590, -0.139097, 1.575312, 2.196729, -0.941464, -8.262540, -0.638440, -6.824471, -0.074786, 1.011750, -0.670687, -1.007823, 0.045868, -2.374631, 1.684719, -0.045337, 1.461761, -2.164474, -1.150534, -6.288441, 1.167940, 1.194985, 3.903839, -0.619031, 0.028055, -0.191233, -4.451861, 4.783247, 5.022339, 1.231301, 0.481011, 6.396512, 0.760756, -2.451174, 0.746618, 0.321768, 1.959963, 2.070307, -2.492432, -0.525367, -5.847020, 2.069601, -1.411780, 3.609716, 2.081538, 0.793948, -1.213014, 1.108669, -3.788055, 0.557970, 2.073365, 1.701093, -0.140462, 2.848723, 2.586497, -0.004066, -0.095212, 0.155530, -1.537058, -2.170103, -0.590924, 3.047679, -9.261539, 3.432003, 0.269873, 0.177570, 2.308136, 3.008693, 1.619454, -0.016827, 0.026867, -5.560940, 5.045645, -4.177537, 1.934065, -1.685166, 4.010276, 5.874868, 0.841095, 4.709139, 3.519374, -4.854788, -4.095969, -8.364801, -3.231043, 1.172581, -3.096938, -0.000019, 2.955939, -0.926446, 13.812618, 2.825307, 9.076035, 1.589575, 1.764802, 3.654632, -2.036473, 3.541994, 2.429921, 4.139260, -0.497009, -1.738610, 2.123885, 2.642132, 0.275264, 0.855564, -5.831653, 2.246616, 0.023545, -7.298081, -2.137527, 2.017174, 0.393187, 1.529370, -1.255867, 1.956797, -1.948093, 1.018055, -2.814951, 6.209908, 2.277940, 5.806929, 3.130994, -0.627798, 7.336646, 2.083061, -4.549458, 4.951706, 0.772481, 0.869098, 3.231348, 1.541711, 7.375397, 2.571870, -2.376264, -4.524474, -5.443826, -4.876126, -1.958362, -2.181725, 2.247090, -0.226717, -0.491189, -3.172199, -0.965165, -3.733088, -0.687946, 0.150711, 1.549857, -1.151845, -6.224751, 0.656077, -1.463655, -4.923444, 9.424564, -0.430541, -0.636936, 0.387190, 1.244944, -1.119326, -1.728293, -0.029272, -2.663060, -4.070954, 2.796909, 8.028749, 1.993612, -2.583833, 0.010637, -1.107144, -0.261289, 4.961470, -4.536589, -10.266493, 3.049201, 3.876967, 7.355556, 8.524590, 1.190615, -8.996363, -1.003444, -3.377495, 2.542468, -0.481224, 6.039497, 1.660977, 0.247955, 0.039296, 1.662247, -3.405310, -4.569863, -5.542986, -2.694810, 2.877109, -7.142367, -1.883978, 5.953273, 8.426865, 0.352936, -3.630321, -1.795355, 2.890971, 0.635473, -0.263253, -2.427855, -5.310596, -3.608562, 8.845384, 3.871068, 6.310348, 0.707003, -0.342756, -0.087262, 8.172073, 0.206082, -0.337659, 3.109874, -0.739516, -6.653020, 3.656507, -0.076342, -9.071382, 7.765026, -0.710259, -2.233625, 1.765141, 1.391488, 6.131129, 1.656170, -1.658921, 2.189786, -2.161172, 
-0.338138, -1.265457, -2.170792, 0.113237, -0.801543, 2.488951, 1.887169, 0.242786, 2.324345, 3.300882, -2.212380, 3.148038, 2.236308, -0.230763, 0.751805, -5.793574, -2.214365, -2.644563, 1.770735, 3.126152, 2.110815, 4.793191, 7.258339, 0.048348, -3.515946, -1.813341, 1.869273, 4.866038, 1.926262, 3.972730, 2.385604, -1.848568, 0.219905, -9.625678, -1.069563, 3.211653, -1.623303, -1.982656, 1.073308, -5.629014, 0.353502, 0.214252, -0.040037, -3.635118, -5.762735, -5.934147, -8.640684, -2.239116, 4.444191, -4.452192, 1.294073, -0.977634, 1.506236, -13.821466, -4.855133, -5.454500, -2.171217, -0.434645, -2.141109, -4.326273, -0.766056, -1.166658, -6.828095, -3.992313, -7.985675, -0.027156, 5.481761, -7.887405, -2.372142, 5.611740, 0.888385, -3.239222, -3.584783, 3.417515, 0.021144, 0.135067, 0.688556, 0.110383, 1.354925, -0.585762, -4.328357, -1.287105, 0.332779, -1.603604, -1.109169, -0.901873, 8.353974, -1.116070, 1.628976, 1.830307, 1.022571, 1.062974, 0.299776, -5.153921, 2.923855, 0.328887, 4.727024, 2.666137, -0.156452, 6.533584, 0.907966, 2.890931, 0.850553, -0.256655, 2.195430, 1.751886, -3.059696, 3.390126, -7.620480, -1.086390, 0.235307, 3.446947, -3.659053, -2.235693, 3.577014, 4.124716, -5.995736, 0.100759, 1.376123, -1.296779, -0.203487, 3.080584, 1.898631, 3.431888, -0.008696, 1.994654, 2.499069, -0.635386, 3.471577, 0.905813, 0.630546, 6.562492, 1.271718, 3.713083, 1.098022, -4.175727, 4.026644, -0.257966, 0.606633, 1.860236, -0.018629, -5.897700, -6.301016, -5.071455, -0.185444, 0.499724, 10.935959, 1.322391, -9.634394, -0.000759, 4.677254, 3.396303, 1.513356, 2.621404, -0.464442, -2.804390, -1.339561, -1.437342, 3.300574, -0.147449, -0.209068, -0.165322, 1.059605, 1.022480, -2.444871, -0.751516, -4.006464, -2.984862, 0.179842, -1.123377, -2.404706, 3.201952, -5.689353, 5.978168, -3.141390, -1.092880, -3.688441, 3.623991, 0.292171, -3.090468, -0.342997, 7.975073, 6.717894, -4.216690, 3.078193, 6.105803, 0.494769, -5.520638, -3.829310, -1.026091, -0.316878, -1.016270, 4.841892, -4.966675, -1.781183, 3.332318, 8.794328, 2.340522, -0.695414, -0.139798, -3.714368, -10.670636, -4.909167, -0.101089, -0.069161, 0.019572, -5.015254, -1.415708, 3.461821, -7.015650, -7.979800, -1.066470, -1.152448, -6.153243, -1.672579, 1.702864, -3.064969, 0.201905, -5.259581, -0.503686, 0.578921, -3.739371, 2.564897, -1.585426, 3.096273, -4.505628, 1.765620, 0.328798, -4.846415, -3.890337, -0.927081, 0.286746, 2.852578, -0.967980, -3.238074, 1.137814, -6.562785, -0.049307, -0.323686, -3.352189, -3.761620, 3.260206, -4.171178, -1.397603, 0.848810, -4.179512, 1.881929, -3.730735, 1.430817, 5.400832, -0.877720, -0.356905, -0.132025, -0.811912, -4.233059, 2.743707, 2.490922, -1.117086, -5.067132, -5.421246, 1.156588, -0.534440, -2.792426, 0.890560, 5.753962, -0.401602, 5.689643, -2.211078, -5.797209, -1.816319, 2.495221, -4.360130, 9.668447, -3.770986, -1.059373, -0.057251, -3.028395, 2.165006, 1.875862, 0.047700, -6.979061, 4.413591, 8.592163, -0.811140, 3.140186, -4.511807, -25.813583, -3.023678, 3.811153, 1.938581, -0.339177, -2.171618, -1.244932, 4.403308, 0.496575, -9.532254, 1.335673, -2.444872, 11.699122, -2.274694, -3.430034, -1.042132, 0.780243, 6.916922, -1.139536, -0.345397, 4.638951, 1.473616, -0.682799, 2.614158, -3.584642, 2.728699, 4.555742, -7.218204, -2.331289, -0.233210, 2.100440, -0.153805, -1.635176, 2.062756, 2.943550, -3.336209, 11.831157, -0.978651, 9.067150, -5.480848, 5.658813, 1.684263, -1.781923, 3.576041, -0.016443, 0.407981, 2.535778, -1.469584, -4.646076, -4.245326, 
-3.564281, -6.245006, -7.484807, 0.828859, -6.765895, -6.919433, -1.568706, 3.538174, 0.000744, -1.663006, 1.832614, -0.309570, 0.843011, -4.510295, -7.789219, 0.102095, -0.547336, 2.566236, 6.499871, -9.986823, -0.168010, 0.102029, 0.673290, -0.269726, -3.662649, -3.644698, -2.213008, -2.874984, -1.793331, 2.448561, -0.164852, -0.012539, 6.463037, 0.000991, 0.304642, 6.797293, 0.494902, -0.983908, -0.078490, -6.651207, -2.543651, 3.905650, -2.124230, 0.081256, -0.905935, 0.684888, -4.545033, 2.243226, -2.221475, -3.098726, 0.080649, -1.958687, 11.438409, 0.433618, 0.742591, -0.007383, -2.748674, 0.274265, -5.529784, 3.973581, 3.241938, 4.704930, 3.234245, 3.328490, -6.592916, -4.131082, -2.005181, -5.353984, -3.514174, -0.606298, -0.861970, -7.022537, -1.764758, 1.116245, -5.607377, -1.402223, -4.620993, -7.055652, -2.674875, 0.019377, -1.363322, 0.563650, 2.075496, -1.449947, 5.376159, 2.146081, 5.126253, -4.720733, -0.006775, 1.399573, -3.728381, -8.535139, -0.334004, -0.113797, 2.193818, 0.744583, -0.097518, 5.015729, 5.816702, 3.893484, 1.053184, -6.846307, 1.756239, -1.936607, 3.071102, 3.401009, 4.862002, 1.804502, 0.325939, -2.104714, 0.693890, -0.092639, -2.710700, -1.181900, 5.106908, 3.439631, 2.144355, -3.359043, 1.276522, -7.383888, -2.821352, 0.828196, -0.178993, 1.842888, -0.873826, -0.014114, -4.152460, 0.723015, -0.174186, -5.257160, -2.447183, 7.731831, 0.718454, 0.955554, 1.334993, -0.563071, 3.208822, -3.113315, -4.419868, -2.523047, -4.498507, -9.013672, 1.890269, -3.316503, -6.352427, 0.556511, 0.470561, -2.218465, 0.364930, 2.611593, -3.370936, 0.117874, 6.139898, 2.874041, -7.497670, 0.884382, 0.362684, -10.100249, 0.175558, 4.680916, -1.238638, 1.861386, 0.821007, 2.485271, -1.027795, 4.229118, 3.602708, -0.969462, -0.012795, 4.004239, -6.488414, -4.214726, 3.788011, 3.550853, -4.682927, -1.719694, 7.574303, 7.788897, 0.445002, -3.190887, 3.726520, -0.766279, 2.670411, 1.586039, 0.052926, 1.654625, 5.357676, -6.195482, -0.161717, -1.803791, 5.452054, -4.454659, -2.083320, 3.195639, -0.300326, -0.297610, -6.874451, -6.002811, -2.148858, -2.301163, -1.147759, -0.225648, -4.563822, -1.620989, 1.727572, 5.926391, 5.838719, 3.384335, -2.638520, -2.990250, 1.887299, -0.443030, 0.055715, -2.306771, -1.884962, 3.769308, 2.636223, -1.766434, -1.566344, -0.433667, 0.354005, 3.833169, 0.701964, -1.986933, 7.594246, 7.029210, -5.287083, -0.383797, 0.372847, 1.055878, -7.243296, 4.116444, -1.470766, -2.081233, 0.751267, -1.407273, 0.047860, -4.575079, 1.846502, -2.084986, -4.731189, -0.288104, 3.557981, 1.617163, 6.832978, 0.241575, 3.541188, 1.288880, -0.655247, -3.033360, 4.721385, 1.745900, -1.519267, 1.338891, -5.970143, -0.026291, -4.531422, 1.013936, -0.677987, -1.631544, -1.426714, 0.050153, -1.304231, -7.678109, 0.011271, -7.539436, -2.284989, -2.680998, -0.004160, -1.347320, -4.380244, 1.565084, 3.312843, 1.413599, -0.669517, -3.681749, 0.709569, 1.999314, 0.412539, 5.065418, -3.437043, 1.724517, -3.085526, -3.611425, -0.523062, -0.010791, -0.733256, -0.511650, -0.947212, 7.544014, -9.780771, -4.079500, -8.355167, -0.197175, -0.205433, -2.260860, 3.004659, 6.327532, -2.503898, 0.466260, 4.451628, -1.959501, 5.972856, -3.135310, -7.903263, 3.861253, -2.754281, 3.395368, -3.378115, 5.876959, -0.023305, -6.324285, 4.745381, -1.885606, -2.357712, 1.494764, 3.842016, 1.277511, 2.445165, 0.410928, -7.133320, -7.055686, 1.454283, -6.182934, 2.136036, -6.857017, 0.000498, 0.517878, -1.397246, 5.371858, 6.068283, -3.182340, 2.911753, -1.591159, 1.384250, -2.744489, -4.217806, 
3.245556, -0.992994, -1.537308, 0.531573, 1.309127, 2.590945, -0.120438, -2.848826, 5.013155, 1.521207, 5.516780, 6.891029, -4.153908, 0.404247, -3.702703, -0.710221, 0.207040, 5.352168, -2.242288, -0.374315, -5.540435, 3.889261, -4.950356, -4.913716, -0.618769, -0.173921, 3.692221, 3.737804, 1.018407, 6.317423, -1.107868, -0.509221, -2.250231, 1.382472, -0.708276, -3.613082, -1.943448, -3.317917, 1.306921, 1.475165, -1.240517, 3.184206, -1.496189, -5.016690, 0.877661, -2.169623, -6.401884, -2.942557, 11.352693, 2.099488, 0.614432, -1.606878, 7.248198, -0.111037, 3.277194, 1.628116, -4.641020, 5.924992, 0.029633, 0.192731, 8.365792, -6.858413, -5.763559, 0.514552, -5.286735, 1.072456, 0.759393, -0.643198, -0.601689, -0.001599, 3.493394, 2.978892, 4.046279, 6.537354, 2.684996, 3.459695, -2.992692, 1.694266, -0.984646, 1.080792, 0.768374, 0.117237, -0.996767, -4.296034, 6.863399, 1.729657, 1.903897, 5.195855, 0.792808, -0.257125, -0.098713, -0.631066, 0.669095, 0.997432, -6.579898, 2.109600, -3.509987, -0.903696, 5.622768, -0.303515, 3.135252, -1.116153, 6.057675, 0.912521, -0.031544, 0.424326, 1.687479, 1.641292, 4.212925, -1.859976, 0.283334, 1.681501, -0.374205, 0.151593, 1.310990, 3.303145, 7.866085, 3.097261, 5.917388, 4.298635, 3.983059, -0.449333, 1.928347, -1.525197, -0.838800, -2.377027, 1.334727, 5.235521, -3.699142, -0.407828, -0.448997, -5.229319, -1.974771, -2.172120, 9.177399, -2.846216, -1.863153, 0.476969, -0.512451, -7.109721, 0.069376, -4.748658, -3.194703, -2.120162, 0.813585, -0.148194, -7.338431, 2.688310, 3.676924, -5.626202, -3.342699, -0.141512, -6.641322, -1.594967, -2.834696, -3.028884, 1.248042, -2.363557, -2.148775, 1.365464, 1.530013, 2.991108, -1.414532, 4.466099, 1.017612, -3.484221, -2.224066, -0.484973, 2.508795, 1.450046, 10.966900, -0.352431, 0.076239, -3.460869, 1.961258, -0.589401, -5.012602, 4.541590, 0.058453, 0.498920, -5.368912, 6.354663, 4.114835, -4.584216, -1.445249, -0.581226, 0.452248, -1.578423, -3.973688, -0.795710, 6.948200, 2.276614, -3.563425, 4.821636, -4.619415, 0.797093, -3.595433, -2.219263, -0.506839, -1.986749, -3.095446, 1.296894, -0.226968, 25.884138, -7.859181, -0.513497, 0.946560, -6.172439, 3.309793, -0.158353, 0.322889, -0.067943, 1.376138, 8.334361, 4.483156, -0.709400, -0.175756, 1.210750, -0.618878, 4.989367, -2.063022, 1.482588, 4.259482, -4.579537, -1.819578, 4.079937, 4.017149, 0.243652, 1.662365, 3.750094, -3.864753, -4.896174, 2.753638, 3.391126, 0.700742, -0.412252, 0.236695, 1.602127, 1.201747, -2.840552, 1.189786, -6.024556, 0.086590, 2.327789, -2.947853, -1.355535, -0.952914, -3.780833, -4.136289, -4.310068, 4.045132, -3.136507, -7.267282, -2.318031, 2.078475, 1.244233, 3.712967, 0.069280, 4.239871, 0.085674, -0.501848, 1.826206, 1.723327, 0.948976, 3.317792, -1.361878, -9.505491, 1.652231, -4.323700, 8.702504, 1.165740, 0.077295, 0.598673, -4.058940, -1.280203, 1.805292, -3.056866, -0.153063, 4.944287, -0.401268, 1.127354, 0.800949, -4.552731, 4.099965, 2.931844, -6.894278, -1.693199, 3.370374, 0.377143, 0.819665, 3.199957, 2.406297, 0.611619, 2.428483, 2.364851, 1.625717, -0.097511, -0.764180, -0.023583, -0.932676, -5.791678, -4.254630, 0.696105, -2.473110, -6.286937, -4.430090, -3.154974, 2.485222, 4.677386, -2.933256, 6.317354, -1.070770, 0.032252, 7.103585, 0.443433, -6.901791, -2.766052, 0.978416, -0.820062, 4.384764, 1.022396, 6.335260, 2.096919, -2.545128, -4.920263, 1.549829, -6.143991, -4.316087, 0.086893, -0.475092, -5.423429, -3.094475, -2.882592, 0.627850, -2.327073, -3.407857, -10.233243, 1.028297, 
-3.554084, -0.041444, 6.200572, -1.833720, -0.137024, -3.761790, 3.412513, 3.724448, 2.632896, -5.099319, -0.186855, -2.287660, -1.638959, -2.688552, -0.361400, -0.437027, 2.332490, 0.589219, -7.016140, 3.163738, 0.533234, -2.952399, 1.867289, -3.524182, 4.616333, -0.002991, -0.354729, -2.023196, -4.608538, 2.560603, 0.215824, 1.806846, 0.561700, -0.379066, 0.393609, 0.415946, -2.526092, 0.359140, -1.839410, 5.859710, -4.694609, -1.859303, -3.433860, 5.516655, 5.651124, 6.562935, 1.522503, -4.638336, -1.475609, 4.560638, 5.664787, 6.312329, 3.594746, -0.803987, 0.202642, -1.195277, -3.372053, -0.752331, -5.590456, -4.646204, 3.535979, 0.516356, 7.138364, 0.744697, -2.785372, 0.392401, 1.610076, 0.116163, 1.405722, 3.875001, -2.949129, 3.472404, -0.491449, -0.467863, 1.288103, -1.870304, 2.466842, 4.456291, -0.124447, 2.135671, -2.846490, -1.165408, 5.972806, -1.132864, 7.914446, 0.508462, 7.200937, -1.835051, -0.482001, 2.587946, -2.791816, -0.153601, -0.906362, -2.069110, -0.459997, 0.456048, 1.558618, -3.118214, 0.856418, -0.231932, 3.516111, -2.733642, 0.631028, 0.245109, 0.034195, -6.143626, -1.420549, 5.880464, 1.088104, -2.628313, -0.914301, -0.426224, 4.995555, 1.648484, 0.431542, -2.718431, 0.215640, -3.991919, -3.036944, 4.598671, 1.975244, -0.613328, -3.461954, -2.062270, 0.318039, -0.597685, -1.040276, 0.786417, -0.262567, -0.589535, 0.393135, 5.124829, -3.209585, 4.304561, -5.214456, -4.707873, -7.520905, -6.979288, -2.319449, 2.466626, -1.716434, 3.854492, 5.168052, -3.225215, 4.064557, -0.890216, 5.347986, -0.033329, -3.681368, -2.246211, -2.044171, 0.322070, -4.800444, -4.250727, -4.454211, -3.514904, 2.655819, -6.546813, -1.598998, 4.023914, -0.005552, -6.960735, 0.298785, 2.436974, 2.026727, -6.772633, 0.199327, 1.160285, 1.774645, -3.146263, -7.526332, 1.900193, -4.620900, 5.042796, -1.024451, 0.878534, 0.017084, 0.904181, -2.732744, -8.174599, -3.099883, -0.999880, -0.198049, -0.483228, -4.134248, -6.123836, 0.685489, 5.716204, -0.749494, 0.604121, -5.012710, -0.446452, 2.066836, -1.172677, -0.399439, 4.908616, -0.771081, 4.457398, -2.098842, -0.501791, -2.526716, 0.771477, -1.061811, -3.930525, 8.341999, -0.437698, -0.275091, 3.580389, 1.218396, 0.175618, 0.197714, 1.028314, 1.273843, 6.223643, 1.182448, 10.747875, 1.041242, 0.109011, -11.229621, -2.388422, -0.528528, 11.054355, 2.281246, -2.086136, 3.279294, -3.566483, 2.603531, 0.445095, -0.063839, -2.898587, 2.457841, -0.759582, -7.168111, 4.830601, -0.343200, -0.612604, 3.420349, 4.547206, 7.285605, -1.936344, -0.264012, 0.621680, 10.183675, 0.325389, 0.015092, -3.890042, 7.610522, -4.370181, -8.066639, 5.763288, -2.264709, -2.287764, -0.089371, -4.009732, 1.703158, -0.996195, 1.862794, 4.200593, -0.008101, -1.215292, -0.663909, -0.310804, -1.607721, 9.467325, -3.277249, -0.012757, 0.231754, 0.034934, 0.492380, 1.150235, 4.284363, -1.196000, 0.128064, -2.985998, 6.067877, 1.931229, -0.213001, 0.453486, -1.781323, 2.348450, 0.097704, -0.468235, 0.413639, 7.249700, 3.779612, -0.549643, -0.050774, -0.031199, 2.873239, -0.768314, -0.040047, -0.828497, 1.118836, 0.246845, -6.382168, 0.637833, 0.451397, -2.622777, -0.133653, 6.433577, 4.867972, 3.546211, 3.398878, -1.906015, 1.196999, 2.006929, 0.786734, 4.891016, 2.503478, -2.741007, 4.865725, 1.387283, -11.435909, 1.734073, 1.678593, 0.398326, 8.134402, -3.630407, 2.569750, -0.513990, -2.003817, 3.147748, -3.479194, -4.521346, 2.160461, -4.760503, 1.411289, 0.989127, 2.281021, 3.922068, -0.155595, -2.587337, -0.547264, -4.417817, 2.839503, 1.478019, 0.928264, -1.520962, 
2.300986, 1.824427, -2.054479, 1.750956, 0.670408, 4.240636, -4.206879, 0.825712, 0.170197, -1.309047, 7.042284, -2.808839, 1.278867, 0.675022, 2.942729, -9.454078, -2.041019, 0.441376, -0.163395, 3.919055, 5.771101, -1.755992, -0.506141, 1.130097, 0.115922, -0.116327, -6.025976, -4.385695, -1.123508, 3.069795, 0.695987, -4.680645, -4.437606, 3.878303, -1.635001, -3.849114, 1.502949, 4.205227, 0.935144, -6.548295, -0.718375, -2.716839, 1.475906, -2.302369, 2.143170, -0.129654, 0.392791, -2.577940, 1.126844, -0.290627, -0.063034, -1.154525, 6.816575, -2.688456, 3.070588, -3.057046, 0.002165, 0.273359, -4.180090, 0.614740, 1.170131, -2.350872, -0.827711, -0.156118, -2.305817, 4.330891, -0.710802, 11.884938, -0.380077, 0.110313, 7.602226, 10.002245, 0.360508, 1.909266, 1.339925, -2.373763, 0.836318, 2.776081, -0.694188, -3.979372, 6.721390, -0.181755, 2.983377, -0.601471, 0.478235, 5.683246, -3.012722, -1.348395, 2.100513, 6.260212, 0.034391, 1.333329, -10.040885, -4.544642, -2.925191, -1.400667, -7.342693, -3.248511, -1.108893, -4.364802, 2.973909, -0.389134, -2.340841, 4.763739, 2.008492, 6.680654, 5.412335, 4.818300, 1.780283, -0.424301, 0.043778, -2.362116, -0.146802, -0.958689, -2.140199, 3.908817, 9.249252, -2.930516, -2.118222, 10.984769, -0.066360, -0.164985, 5.758742, -0.156840, 3.128972, -2.280814, 7.105916, -0.001550, -6.674235, -3.249885, 2.993154, -2.397078, 2.758408, 13.904146, 4.447309, -0.554369, 0.982139, -1.688421, -0.897859, 1.484438, 1.535573, -2.792843, 3.868578, -2.332367, 1.260296, 1.206884, 0.913132, -0.640889, -1.967816, -6.116515, 0.412191, -2.647040, -0.008041, -2.644320, 3.687829, -1.048893, -0.283870, -4.292102, -0.878158, 0.003143, 3.007888, -6.075198, 7.674708, -3.085419, 1.250305, -3.839580, -4.304334, -4.159411, 2.063132, 7.935884, 2.059508, 1.354737, -7.109372, 0.422387, -3.505325, 6.440124, -0.709708, -0.266126, 0.004942, 5.169637, 0.711437, 4.431763, -3.712740, -6.830352, 2.374137, 6.030969, 2.813872, -0.828941, 5.753105, -3.969281, -0.101831, 1.111752, -0.577075, 1.980342, 5.342420, 0.830509, -0.849474, 2.333640, 4.110284, -10.945628, 1.495124, 6.051959, -0.124720, -2.623258, 1.853333, 5.212372, 0.415820, -2.715355, 0.206931, -0.128600, 1.746347, -0.155745, -0.233455, 2.918806, -0.094414, -2.185907, 2.838618, -1.284784, 2.813802, -0.762434, 3.002384, -0.102323, -0.058628, 7.423843, 1.276592, 1.294652, 2.240631, -0.610631, -7.287424, 1.418261, 0.816840, -2.684842, 4.845943, 4.965572, -1.209319, -1.404999, -4.692338, -6.659638, 0.113021, 1.394000, -7.399866, 5.147840, 1.526794, -2.491843, -2.009047, 2.508059, -0.245668, 1.353432, -9.987974, 2.926514, 3.554738, 4.995962, -0.890090, -1.132992, -5.233229, -2.454151, 0.138648, -3.343158, 3.218903, 10.731729, 0.179697, -4.158245, -5.648637, 3.967639, 1.781262, -2.466896, -2.424516, -2.444896, -5.249668, -4.497662, -4.044394, 0.243171, -4.846896, 1.963107, -4.203612, -4.168442, 2.909922, 7.474030, 0.762696, 0.948857, -1.459502, 1.139815, 3.069055, -3.323893, -1.106123, 0.959271, -2.972250, -1.056439, 2.845901, 3.328005, 1.591034, -6.209545, -0.361117, 0.519224, 5.367730, -0.263531, 1.660156, -1.372960, 2.819782, 4.583301, 0.053495, 5.665277, -5.296394, -0.025112, 2.557382, 2.700260, 2.188862, -3.309154, -0.160360, -1.019788, 0.422536, 0.359501, -3.985848, -4.308410, -4.981796, 1.889882, 1.053905, 4.603822, -3.071977, 6.326282, 1.321898, -0.012240, 0.090043, -0.344419, 5.147530, 2.265015, 4.738169, -0.093961, -3.811850, 1.047064, 10.157013, 4.743258, 7.141965, 1.066689, 2.954718, 0.296469, -1.034733, -1.710455, 
-0.532571, 4.611936, -0.669968, -1.541446, -0.887973, 4.762004, -4.973036, 1.581993, 4.105812, 1.056270, -2.201197, -1.150516, 0.819976, 0.824889, 5.638547, -1.001109, -2.248711, 4.272346, -1.889298, 0.173297, -2.583799, -3.014771, 0.381561, -0.058445, -10.554512, 0.568557, -4.030552, 1.063237, 6.562668, 1.954355, -0.822928, 0.779761, 4.135376, 5.057029, 0.432285, 6.942090, -1.834300, -0.701786, -5.246745, -3.839270, -8.019054, 4.516356, -3.107868, 4.292901, -1.098935, -3.562215, -6.332080, -3.492798, -0.993626, -1.121348, 0.815774, -0.545536, 3.372131, -2.094609, 6.012065, 2.027513, 0.257524, 4.272281, 0.485950, 0.883503, -3.525961, -8.162830, -0.288104, -1.624345, 2.529218, 3.295431, 4.993791, -6.329498, -5.746292, 2.473589, -2.429946, -1.117752, -13.519817, -7.268318, -1.118978, -2.724793, -0.335434, -0.237011, -1.577320, -4.367187, -0.083867, 3.224887, -2.845260, 6.178665, -5.798835, 3.059986, -3.558267, -3.499238, -1.651811, 3.329851, 1.396718, 2.873462, 5.183454, -2.470144, 0.299798, 1.683276, -0.754664, -0.557604, 0.097709, -0.316821, -1.537827, -0.769508, 2.565984, 1.490902, 1.196525, 1.016349, 1.122763, -0.129937, 1.247755, 4.268309, -2.340124, -3.855088, 0.941537, -0.381153, 3.338399, -3.204222, 5.272754, 1.381338, -3.719207, -9.036061, -8.605568, -4.693647, 2.356426, 2.091722, 0.295684, 0.956897, 2.216397, 0.641491, 0.880784, -1.044372, 0.020868, 3.880427, 1.597131, 5.130661, 4.724403, 2.608148, -2.235970, -8.938436, 2.472075, -1.878265, 1.717556, 0.341884, 12.340206, -0.165873, -6.448755, -11.655176, 0.972309, -2.180458, 2.936051, 0.447728, 11.636771, 9.458894, -1.738721, -1.681901, 0.447583, -0.827482, -0.076822, -0.131987, 6.703393, 2.605911, -0.062193, 0.333883, -0.090670, 1.583676, -2.702961, 0.340563, -7.199040, -2.043446, -0.057329, 0.345143, 1.823417, 4.900608, -4.105616, 0.491261, 4.201406, -3.536129, 1.356752, -0.029528, -3.808081, -11.491957, 9.798372, -0.184595, 3.900829, -0.025702, -0.447673, 2.083195, -0.743767, -2.523749, 4.305879, -3.468534, 0.310016, 2.645331, 4.062176, 2.604977, -0.487273, -0.001471, 0.569222, -4.263353, -6.549046, 0.947340, 0.946195, 3.979799, -0.127920, -3.617949, 0.173315, 0.038247, -1.671738, -4.277497, -4.186216, -4.471751, -0.226938, 1.509640, 5.555100, 0.017173, -0.773783, 0.971961, -1.275968, -0.076477, 2.977075, -1.405722, -0.281382, -4.553804, 0.406450, -0.710510, -3.648431, 1.768343, 1.893957, -3.130730, 0.581914, 2.696048, 0.928965, 4.308264, -1.598643, 3.463273, 0.048406, 6.637488, 0.153922, -6.169127, -2.057266, 3.258104, 3.054747, -0.000057, -0.026145, 1.591739, -0.023785, 0.233321, -4.133327, 0.662224, 1.097311, 7.185622, -5.549306, 2.042615, -5.847503, 0.172207, 1.538699, 2.668698, 5.988052, -0.863076, -0.585639, -5.556726, 1.846621, -0.951853, -3.809459, 9.860162, 5.942457, -5.704624, -2.869631, 0.010843, -2.641475, 2.167218, 1.833458, 0.296820, 0.634770, -0.741936, -1.644975, -3.141125, 3.339068, 0.776640, -1.568341, -3.492356, -0.470710, -1.309820, -3.142795, 1.264116, -3.490557, -3.572397, -1.651310, 1.131271, 2.926528, -0.616492, -3.792120, -0.214447, -2.344300, 5.790883, -3.102818, 8.839439, 0.409714, 2.168245, 0.202377, -0.454987, 0.519873, 8.175124, 0.427410, -9.902246, 1.130025, -0.513523, 1.867186, 5.343970, -1.920144, -4.094131, 1.771582, -1.935505, -5.532904, 4.878136, -0.060956, -0.026615, 0.446926, -2.016111, 7.120039, -0.424641, -0.061255, -0.593578, -1.958208, -0.391602, -0.472750, -0.758140, 1.507612, -2.936864, 0.195067, 2.777847, 1.675735, 4.349537, -0.571986, -3.606022, -6.699798, -0.564082, 4.571085, 
0.327201, -1.905577, 5.581552, -0.342707, -3.362906, 4.729235, 1.089156, 5.635921, -0.014798, -1.261189, -0.332104, 0.609498, -1.647114, 1.669553, 0.374264, 3.282902, -1.752555, 3.738577, 1.981194, -0.869527, -4.507329, -1.822255, -1.375043, 0.361820, 5.513688, 0.230618, -1.481770, 1.364776, 0.650051, 0.102176, -0.001490, -0.888542, 0.217108, -4.400694, -2.460488, -2.142963, -3.445317, -2.452469, 6.746197, -1.921431, 9.492379, -2.325576, 0.405751, 2.609226, -1.407998, 0.986092, 4.514874, 0.108704, 0.603596, 1.750442, 1.714508, 4.789082, -1.440571, -0.878463, 1.520678, -1.585889, 0.105334, 7.194630, 7.006618, 1.133277, -0.062292, -6.020393, -5.635540, -3.064330, -2.972053, -0.993023, 1.618374, -0.057395, 1.493672, -0.789785, -5.251556, 0.056737, -6.214289, -7.015460, 2.780783, 3.248916, 5.244694, -2.412669, -0.188875, 2.366927, -4.429873, -0.040983, 4.134392, -4.651297, 1.849452, -2.512143, 0.114833, 0.979572, 0.065521, -2.298851, 4.910531, 4.155065, 0.352131, 0.151823, -2.360681, -4.953442, -0.555213, -0.335283, -4.150012, -3.033337, -5.318827, 1.216185, -4.718475, 3.039701, 4.347839, -0.186076, 3.013520, -0.885484, 0.199431, 5.056513, 1.274066, 3.330811, 1.071532, 2.307818, -1.231845, 2.970392, 1.831645, -2.448898, 3.736881, 0.057759, 6.679784, -2.403128, -9.787365, 2.563922, 3.247179, -0.012910, 1.123178, -0.634490, -0.009691, -0.005552, 0.855638, 0.113647, -0.269984, 2.792508, -0.743296, 3.500570, -1.032244, -2.892419, -3.963207, 7.623781, 4.205984, 3.480365, -5.277036, 1.330614, -3.268126, 4.517873, -2.923351, 5.408849, -0.006212, 2.714594, -0.022754, -1.552000, 1.460807, -3.763390, 0.410225, -5.409842, -0.065908, -7.934735, 4.239173, -4.070442, -1.405998, 0.642291, -2.410548, -1.833019, -6.965807, -4.474487, -0.279187, -0.371922, -0.142775, -2.046949, 0.070080, 0.163394, -0.184711, -0.168619, -4.530483, -5.300485, 1.628138, 5.035965, -4.569588, 2.654992, -3.110272, 2.006499, -1.726934, -2.530666, 3.108309, -2.464334, 0.137819, 0.004644, 2.702362, 1.602196, -6.397662, 0.931750, 1.214246, 4.497550, -0.881810, 2.550910, 3.810047, 1.506102, -0.908724, 1.272825, -3.227784, 2.377525, 4.363932, 0.019257, 1.587560, -0.121363, 1.177220, 3.055651, 0.946687, -2.825658, 0.819505, 2.376204, -1.820166, 4.814208, 6.989204, -2.373306, 3.306972, 0.416947, -3.589711, -4.721005, 6.229137, -2.443368, -2.988649, 0.354574, -3.521739, -6.654729, -3.289409, 3.314070, 3.655743, -1.040791, -0.306735, 5.773582, -7.453763, -5.359430, 0.426405, 1.028669, 1.241614, -2.126209, 4.864559, 2.336204, -1.985213, -0.902069, 2.699632, 1.624854, 3.709470, 0.671311, -2.212065, 3.601441, 1.050618, 6.903223, -1.270935, 0.316807, 4.597841, -0.974303, 1.224436, 1.375242, 4.650789, -1.354023, 3.387038, 3.106138, -0.427633, -1.999412, -14.263725, -0.312913, 0.063937, 3.454380, 0.853375, -3.006591, -2.441993, 0.583795, -2.939463, -1.243754, -1.021871, -3.958032, 1.048049, -8.227103, 0.404519, -1.017159, 0.273273, -3.723046, 1.759197, -3.746738, -4.732924, -2.561224, -1.146303, -0.398051, -1.964425, 1.913997, -1.476784, -0.029199, -0.700788, -4.500231, 0.482127, 3.013607, 1.401778, 3.882581, 1.152152, -5.617946, 0.144557, -1.759709, 6.107874, 4.922176, -5.179372, -0.909929, 0.649689, -0.757023, -1.680153, 1.275247, 2.041158, 1.328913, -6.673042, -0.273911, -1.765330, 8.019628, 3.207435, 1.077303, 0.203352, 0.931596, 2.524080, -3.727402, 1.081128, 2.175904, -3.343817, -3.746030, -0.314732, -0.332013, 5.270098, 1.829976, -2.130228, 0.614104, -2.692844, -0.289054, 2.551070, -1.460109, 2.645048, -1.196309, 0.115082, -3.896767, 
9.656350, 1.459867, -0.003529, -4.774947, -5.914416, 0.215330, -3.432626, 1.021704, -2.312573, -7.024405, -1.849720, -3.092340, -1.368001, 0.188480, -1.194326, -0.637599, 0.122717, -2.641920, 3.492096, 4.106804, 9.270782, -0.035160, -0.245241, -8.323165, 4.888138, -2.226707, -2.994345, 2.020879, -0.395241, -0.179746, -3.197128, 1.265654, -0.532883, 3.058285, -2.062794, 3.207631, -9.054969, -2.864531, -0.871130, 9.117023, -2.347618, 5.941471, 2.077816, 0.883953, 8.118439, -0.521390, -0.238949, -3.748918, 2.144395, -1.129512, -4.516557, 0.313121, 0.936105, 2.460839, 0.088672, -3.876451, 5.289827, 4.853863, 2.688495, 0.964901, 5.693359, -0.643778, 2.647713, 5.704851, -3.993457, -2.516671, 1.483038, 0.399039, 0.102820, -1.324987, 4.249714, -4.987561, 0.594573, 2.687088, -5.280706, 0.778387, 5.244863, 6.302296, -2.648989, -7.877838, 2.290410, 3.640933, 0.739566, 0.178788, -5.616697, 4.014147, 3.964926, 1.532802, -0.431018, 2.901000, 3.186200, 3.876082, -1.122623, -0.058375, 4.122059, 0.479492, -2.288769, 1.988854, -1.392784, -1.931579, -3.573721, 3.813021, 4.792922, 4.041496, 0.563931, -9.098000, -1.356132, -2.051821, 3.282480, -1.566306, -2.023037, 0.979644, 0.447274, 6.022537, 2.812065, 0.162424, 0.206939, 2.453523, -4.538718, -2.030375, -1.488082, -3.197017, -3.449918, 2.998883, -0.468020, -6.209292, 2.547401, -0.614768, 5.984511, -0.104435, -5.531857, -4.084041, -0.370942, 1.875537, 1.204635, 6.465639, -0.539545, 1.725069, 6.197456, 0.626320, -9.669788, -6.096525, -0.276844, 8.580634, 1.099832, 1.591415, -1.389589, 2.436423, -6.431682, -0.303867, -0.810074, 1.452129, -0.262077, 3.641073, -0.903035, -1.185559, 10.483525, -1.386117, 3.861802, 1.176159, -4.542426, -0.058838, -1.015140, 5.164169, 0.251072, 0.384475, 0.281439, 7.947507, 0.877360, -0.576882, 0.495953, 1.086257, -5.852773, -2.767107, -2.267903, 0.561476, -1.084434, 2.393119, -0.097898, 0.440350, -0.484027, 0.776798, -7.258694, 1.215970, -1.567899, 1.339548, 2.033061, 2.752798, -5.560112, -0.077397, 0.721023, -6.203930, -1.695403, 5.537146, -9.447447, -2.697748, -0.043759, 4.304340, 1.709005, 0.812581, 0.370129, 4.800770, 0.030635, -4.151722, 0.046181, -0.475627, 0.006197, 0.537970, 2.710606, 1.816996, 6.561518, 0.318119, -2.328600, -1.882591, -8.602366, -9.410439, -1.652405, -0.953572, 0.347174, -3.470130, -0.000711, -5.144109, 0.624943, -2.083126, 2.767149, 1.315560, -2.545255, 3.375787, 2.593396, -2.273217, -3.422366, -7.904332, -6.225791, -0.956074, 5.351878, 1.800510, 1.912785, -1.543061, -4.738302, -4.004639, -6.792346, 9.098534, -0.620093, -2.108139, -5.154526, 8.080357, 5.068964, 4.343806, -0.157681, 1.984312, 0.459727, -5.038471, -2.661160, 0.055840, 10.577642, 4.620853, -3.637174, 5.560658, 1.916229, 0.654499, -1.998095, -4.237348, -3.232992, -1.830947, -1.068578, -1.617900, 0.714291, -4.043469, -0.444917, 0.978979, -2.584777, 1.277269, 2.386837, -6.305502, 0.121604, 1.412783, 0.653413, 3.036438, 2.768948, -9.744542, -3.689719, -3.452209, -10.608861, -3.236202, 1.729487, -2.927757, -4.701297, 11.400037, 7.034643, -7.286547, 4.691584, 4.084343, 3.604400, -4.923679, -1.813507, 0.658529, 3.454041, -7.046291, 0.589611, 0.037175, -3.552428, 3.408782, 1.121571, 0.614767, 3.284083, -4.109370, -10.938247, 0.442020, -0.121788, -3.744960, 0.031285, -7.516037, 0.109905, 0.176512, -5.390704, -1.114231, -2.214612, -0.265107, 4.384219, 0.553956, -0.502500, 10.455436, 1.120000, -8.095864, -0.778641, -5.600359, -6.534314, 7.474305, -8.409909, -0.855348, -3.587091, -4.347822, 6.141568, 2.773007, -0.330568, 2.153925, -0.351253, -1.056275, 
-3.540951, 1.026076, -3.707464, -1.645440, 0.000507, 0.304426, -3.583717, 7.039771, 2.526894, 4.688828, -7.581242, -0.782881, -6.695222, 5.090632, -0.006731, 1.689943, -0.842695, -9.328964, 2.591607, 7.422140, -3.216673, -1.951735, 0.333549, 5.436418, 0.078466, 6.354870, -2.104988, 3.388239, 2.421902, 6.996591, 5.495170, 3.211206, -8.029123, 3.962790, -3.307806, -3.314206, -6.477521, -0.223202, -0.522705, -6.183891, -3.008214, -6.073395, 3.447349, -0.096371, 2.344163, -4.739252, 3.953689, 4.222349, -7.004005, -0.052890, 0.999592, 9.236704, -2.130255, -3.619022, -0.149625, 2.876487, 2.704628, -0.794551, 7.147145, 5.903722, 3.915827, 2.059459, -4.207718, 6.486900, 7.151625, 2.032479, -0.075666, -0.138138, 4.182639, 0.131846, -4.178547, 5.530023, -0.760065, 0.969573, 1.061261, 0.406154, 5.338087, -4.981488, 4.180845, 0.882268, -1.054465, -1.154401, 6.672582, 4.544742, 0.086003, -6.785923, 7.909182, 1.706591, 9.751883, 6.775731, 0.225223, 0.388467, 5.400630, -0.121121, -0.677740, 0.644702, 0.522380, 0.170924, -1.159693, -2.598139, -3.585566, 1.972221, 9.582241, -1.873368, 0.874579, -4.828268, -4.376843, 3.810789, -4.812220, 0.718399, -1.180434, -3.242600, -1.566647, 7.654933, -4.102149, -2.279679, 0.137978, -0.368608, -2.755538, 0.816232, -3.242855, 9.129741, 5.765985, 5.019303, -3.263354, 0.339600, 0.010591, 1.132612, -0.977761, -0.090115, -4.913845, 2.898478, -0.007353, 2.086877, -6.258654, -0.351557, -0.749201, -1.551592, 0.136005, -0.046578, 0.083601, -1.886635, -0.607299, 2.785259, -3.973704, 0.619516, 3.986200, -0.534189, 8.397143, 3.932925, -3.002221, 5.126242, 3.359045, 6.020249, -2.376764, 1.726301, -5.409464, 4.890821, 0.601706, 2.109882, 2.187342, -2.695427, 1.940037, 1.926898, -0.002677, 0.021325, 0.830311, 3.520621, -3.227579, 5.155818, -3.496288, -0.099924, -4.218016, 6.323743, -3.572045, 2.323373, 0.158735, 6.494978, 0.897556, -3.062770, -2.858784, 1.566924, -0.033720, -5.879069, -0.101610, -6.846155, -3.554789, 11.750919, -0.712356, -0.338118, 3.835320, 1.461386, -1.816890, -5.919792, 1.644525, -7.327300, -6.579868, 3.293220, -2.693432, -3.955329, -1.005851, -6.254828, 2.498211, 1.940237, 0.481200, 9.656438, 0.240897, -1.265905, 4.347397, 10.950264, -12.237374, 0.502832, -1.690919, -2.828485, 3.525796, -4.257775, 2.051737, -6.492669, 2.024296, 0.391932, 3.784518, -2.389678, -4.416945, 0.625610, -0.293373, -0.037328, -0.517975, 1.649144, 1.160971, 0.506438, -1.131908, -0.038271, -5.668414, -0.442612, 2.382914, 3.323458, 6.836140, 1.910023, -3.950216, 4.875198, 1.890990, 0.551287, -0.219823, 0.455612, 0.012268, 0.026691, 2.258301, 14.445374, -9.955570, -0.627969, 2.279076, -5.863344, 2.019042, 3.147035, -1.781734, -4.277750, -1.705837, -1.741006, -1.453945, -9.678993, 4.473619, -1.069655, -2.861028, -2.588425, 3.709558, 1.780781, 2.657225, 0.088197, 2.650440, -8.729904, 4.577395, -1.823637, -1.182782, 0.195642, 1.754526, 7.484874, 2.348905, 4.355453, 0.010826, 2.071287, 4.483963, -0.253794, 6.190690, 4.231384, -0.523160, -6.299307, -3.804003, 4.693217, 3.173011, -4.272896, 1.136653, 0.333041, 1.018608, 0.213511, 1.778272, -3.588235, -4.582500, 5.411454, 3.166693, 7.529909, 2.292525, 0.173206, -4.227821, -0.042715, 1.165264, -5.945516, -0.610124, -0.699059, -1.251793, -2.387078, -4.381602, 3.461027, 2.618393, 1.338787, -0.930201, -0.910212, -1.132075, 1.220256, 1.061923, 0.302762, 0.794644, -0.929359, 3.629819, 1.467210, 4.339104, -2.303586, -0.993965, -5.887624, 1.497762, 4.370133, -3.421072, -0.475275, 6.531365, 0.235601, 1.784233, -2.123167, -1.381489, 0.110532, 1.492654, 
1.945078, -3.295092, -0.426116, -0.323244, 3.869091, 5.252817, 0.504334, -6.985620, -0.277298, -2.830087, -5.356292, -0.472228, -1.735986, -3.340069, 7.439146, -3.908280, 6.130438, -3.379878, -2.424764, 2.630078, 0.913961, 0.217893, -2.439762, -0.628539, -1.042606, -0.567556, -0.233880, 0.490841, 0.322648, 5.644710, 0.252179, 1.523528, -2.084197, -1.378312, -5.650394, 3.655080, 1.314819, -2.665186, -2.316741, -0.376426, 0.134854, 7.187033, 0.010875, -2.177172, -0.075750, -15.161308, 3.850377, 3.657217, 1.629299, 7.332540, -1.443149, 0.232777, -2.259036, 3.654610, 5.705053, -3.882349, -2.769710, -2.980928, 3.978552, -2.359820, -0.975882, 0.004926, -0.316470, -0.414783, -4.365500, 6.066716, 5.737901, 2.187932, 1.104907, -2.137824, 0.134720, -0.570865, -1.314352, -2.219584, -4.537891, 0.768487, 0.344057, -0.738188, 2.604628, 5.180123, -0.436380, 3.891590, -1.033229, -9.669333, 6.242013, 4.502106, -0.920643, 3.596680, 0.872650, -2.550828, 5.600400, 0.001684, 0.401410, -3.392988, 6.081916, 1.505512, 0.062307, -8.342303, 7.472530, -5.081509, -1.685467, 1.848381, 1.293144, 2.564443, -8.243598, 3.942904, 3.708061, 7.888132, 0.992536, 0.877552, -1.515095, 0.999640, 4.167929, -1.893826, -1.390576, -8.402894, 9.010080, 0.834632, -9.559398, 3.922271, -2.288526, -0.235168, -3.975865, 9.512965, 0.854825, -0.581756, -3.401748, 2.896793, -0.529649, -4.515088, 0.067843, -4.050840, -3.646595, 2.280187, -2.033017, 2.446215, 4.681082, -1.848684, 2.697784, 4.558267, -1.920036, -2.755307, 0.266374, -13.230797, -1.098942, 2.747517, -4.841957, -2.449324, -1.905265, -0.159938, 0.753852, 0.028905, 2.370839, -7.374860, 2.476338, 0.326115, 3.034132, -1.270673, -8.472501, 1.206639, 2.129976, -3.218827, -7.455028, 2.386190, 3.734606, -5.059605, -7.695398, -0.376644, -4.004558, -2.763494, 4.476542, -3.777432, -1.149516, -2.825100, -0.072625, 2.489635, -0.575494, -4.106018, -1.894740, -1.630066, 0.408513, 0.282311, -1.336001, -2.018093, 0.629221, 3.433453, 9.361641, -5.103720, -3.762751, -2.853069, -1.158471, -2.433638, 0.210483, -6.485835, 6.520605, 3.520422, 2.577328, -1.156697, -3.965161, 1.169376, -1.319926, -0.034912, -3.042283, 0.774453, 0.665754, 2.185024, -4.421767, -3.446357, 2.786642, -6.030744, -1.614241, 1.315408, -7.101973, 3.690604, -0.415112, 7.313778, -2.926476, -0.290797, -5.514432, 0.841516, 1.969277, -14.760344, 0.884348, -4.024467, 5.267989, 4.733936, -1.186992, 4.998606, 0.577960, 3.477447, 0.365535, -0.895383, -0.628051, -3.524010, -0.205574, 2.989354, 5.130811, -0.313078, 0.736915, 0.839899, -0.212973, 1.740184, 0.251238, -0.537984, -4.498885, 0.031354, -1.305558, 6.437317, -1.141151, -2.283838, 6.199955, 5.687241, 0.240623, -0.954767, 0.051225, -0.745435, -3.853688, -1.647825, -1.561069, -5.902158, 5.880575, -1.076473, -1.281096, 2.056972, 0.865724, -2.302963, -0.756434, -1.283108, -4.955392, -9.511735, -0.355788, -10.176494, 3.282251, 0.822468, -4.221086, -0.473516, -2.082666, 0.036900, 1.924422, -2.950413, 2.858976, 1.861050, 6.765419, 0.106302, 7.264514, 2.551827, 0.581706, 1.483190, -1.393805, 3.933737, 1.098404, 6.059342, 1.801395, 4.559071, -0.120819, -4.145657, -0.344000, 1.612767, 2.332033, -4.709498, 2.634722, 5.551647, 3.081794, 3.137625, -2.932366, 1.976123, 0.241211, -5.633870, 0.232125, 2.681178, 0.064227, -1.990259, -3.993648, -6.480330, -2.204516, 3.326689, 5.201554, 1.626778, 3.497411, -0.736615, -6.676769, 0.577333, 6.745975, 3.828688, -1.191870, 4.294685, -0.083528, -6.004559, 1.499604, -2.325432, 2.013898, 0.094628, 3.561403, 8.742531, -3.274329, -0.808786, 0.693982, -7.597322, 
7.022217, 0.881783, 2.157397, 0.002330, -2.763650, 1.180749, -5.554371, -5.703213, 3.825368, -2.116017, -2.703773, -1.524027, 0.495374, 5.298570, 9.192527, -3.962620, 1.509935, -2.476857, -4.748866, -2.855859, -0.625818, 0.811186, -5.270221, -0.156211, 2.930984, 1.164865, 0.339505, 4.635352, -2.653825, -8.683824, -2.031138, 4.964819, -6.096752, 3.306898, -1.343876, 10.280967, 5.727515, 3.781071, -5.183549, -0.872773, -0.432444, -0.312937, 1.391468, -0.482233, 2.884065, -5.039195, -1.335339, 1.650138, 8.282273, 2.459844, 1.657121, -1.630570, 4.782663, -2.946631, 3.781657, 2.937099, -5.267007, 5.953804, -1.084603, 2.788103, -2.712114, -0.262266, 0.607717, -2.501194, -3.340202, -6.881357, 4.404689, -6.081471, 0.686818, -4.136237, -1.362703, -5.371671, -2.714184, -4.917381, -1.125225, 11.547019, -3.648393, 6.157897, 3.465099, -6.923062, -1.622779, -0.544766, 3.492985, -0.348151, 0.063623, 0.359321, -1.383675, -0.818069, 2.882192, 4.881864, 0.238952, -6.199996, 4.353363, 0.541442, 1.825681, 0.626724, 2.982978, -4.277094, 5.823081, 1.461838, 0.001074, 3.588224, 0.356305, 4.985291, -5.562328, 0.341573, -2.285148, 2.533214, 0.041677, 0.239970, 5.736994, -3.533044, 1.756228, -7.088032, -0.299523, -1.747292, 0.109185, -11.284340, 2.655393, -5.061846, -2.916263, 7.243564, -0.618130, -0.596763, 3.281673, 7.809619, -0.361148, -0.511788, -2.062974, 1.295618, 1.387303, 1.370848, -0.977142, 0.326596, 6.259760, 6.000131, 2.168836, 0.292792, -6.100121, -1.648291, -2.910793, 3.429210, -4.495223, 3.250162, 7.696633, 2.055804, -0.493762, 0.638779, 8.560684, 0.350733, 1.444609, -0.335017, -1.775962, -3.751612, -1.081775, 2.703094, 2.477901, -3.246076, -0.143827, 2.114472, 2.879983, -1.457717, 4.617747, 20.876135, -4.724092, 3.200448, -4.266057, -1.885840, -1.964761, -0.052492, -0.721372, -0.722218, -3.953537, -0.274128, 5.950771, 3.678252, -4.097919, 3.432512, 0.215204, 3.412847, 0.030229, -3.187820, 1.997679, 8.527829, -6.428014, 0.156769, -3.300626, -5.941474, -0.420770, -0.210680, -6.008432, 4.447064, -3.820146, 0.379853, 0.058582, 4.611588, -10.745686, -3.827505, 3.752095, -0.635158, 4.823095, -1.134440, 0.139132, -0.630321, -2.011078, -5.275105, -0.755605, -1.410538, -3.917605, 3.136648, -1.648700, 5.472077, -2.083815, 0.191695, -6.190964, -0.168221, -6.408861, -2.145018, 1.758466, 1.327917, 0.019401, 7.866415, 2.777255, -6.084412, -0.504542, -0.142678, -0.448162, 4.838410, 1.105759, -0.422734, -4.942491, 0.948695, -0.571098, 2.534549, 0.251890, 3.318484, -4.457048, -0.246802, -3.363921, -0.296879, 1.394156, 0.457000, -0.260458, -7.491118, -1.654942, 6.793195, -4.986042, 0.055306, -6.060972, 6.204267, -8.694168, 5.139706, -1.689499, 2.651025, -0.405957, -0.215962, -1.543237, -0.002264, 0.355597, -3.120486, -2.953389, 2.215263, 0.986249, 2.004209, -5.248799, 3.712906, 3.268253, 1.040430, 3.008511, 0.034117, -2.448920, -0.730816, -1.599155, 0.758854, 9.229083, -0.005004, 1.524734, -0.537981, 0.734243, -1.898822, -0.422717, 7.672883, 9.950904, -5.263350, 1.022968, -1.795026, -0.535556, 7.793455, -0.231216, -5.477964, -0.671323, 3.567302, 0.041599, 1.383575, 2.692178, 3.024520, -10.108123, -0.098066, -5.319206, 1.889755, 1.094370, 1.288221, 0.070469, -5.713789, 0.675052, -0.066914, -4.874253, 2.581139, 1.161425, -8.823732, 0.140147, -0.213331, -5.906888, -4.578184, 1.686043, 5.593365, 3.533107, 8.295037, -2.775516, -0.284114, -2.138807, 0.855444, 0.318211, -5.040519, 0.273568, -0.193171, -4.790638, 4.372327, -1.339575, 3.668198, 6.046596, 0.423543, 1.745564, 0.505791, -0.998355, -1.428472, 2.759795, 0.361569, 
-0.149641, -2.188704, -0.000538, 0.276508, -1.737962, -0.938707, 1.787655, 2.419068, 1.364403, 4.036608, 2.264567, 0.533245, -0.952425, 1.093489, 0.429062, 4.051697, 2.760857, 4.637475, 3.216821, 1.047927, 0.034678, 1.946916, -1.288518, 5.749068, 11.236843, -0.891525, -0.230383, -3.979483, 0.844333, -3.073246, -0.899961, 0.112676, 0.953129, -0.556389, -0.744372, 2.560169, 1.527440, 9.736571, -6.823177, 1.568513, 1.866843, -1.664699, -3.037961, -4.152460, -0.000117, 4.709698, 1.655811, 8.769832, -0.399168, -2.194526, 0.518833, 7.706134, -0.545377, 0.361829, 1.563736, -0.074118, -3.210176, 0.547159, -1.066329, -1.368820, 1.067310, 0.018816, 0.108870, -4.106760, -5.334402, 2.222003, 2.389158, -0.350109, -1.678871, 6.340547, 0.206899, -4.048324, -2.989235, -0.577846, -2.924823, 6.450324, -1.964302, -0.070562, 0.888865, 3.409028, 0.186453, -2.403039, 10.645032, -3.669665, 1.443856, 0.163795, -4.324875, -7.968771, 0.901046, 1.103202, -1.768245, -0.255321, -0.081645, -0.689045, 1.541479, 1.925359, -2.122916, -1.454993, 1.490373, -0.115398, 0.501115, 4.462778, -6.486024, 3.097420, 2.717251, -0.979415, -0.156620, -2.506409, -4.246205, -0.407856, 0.332777, -0.947588, -12.701550, -1.615865, -4.984478, 2.422003, 0.918070, 3.422274, -2.923225, -2.219902, -1.527299, 8.669552, 0.757652, -7.139126, -2.766340, 6.131807, -2.698269, 2.792774, -4.081421, 0.068330, -0.348497, -5.076352, -5.389002, 5.799190, 1.741608, -1.670740, -2.037165, -2.898606, 3.917571, 0.192078, 1.916032, -3.571789, -0.012388, 3.072065, -3.809190, 1.144424, 2.520948, -1.140449, 4.709317, -6.833424, 0.292813, -0.433921, 1.707562, -1.339970, -4.923796, -0.020864, -2.259656, 4.536613, 0.547248, 6.524737, -6.038517, 0.190737, 2.296142, 5.581495, -1.324289, 0.790286, -2.102135, -1.051986, -3.491091, 0.873869, -0.556358, -1.571750, 8.933004, 2.420325, 0.201940, 2.305543, -0.020546, 9.841742, -4.068511, 0.007328, 5.493869, -5.811314, -4.891520, -1.663812, 3.798612, -5.770410, -0.038278, -0.046073, 3.065815, 5.752711, 3.935663, -0.520426, -4.986781, 3.345602, 0.399667, -1.426442, -1.305061, 11.949132, 0.371970, -4.694106, -3.715069, -1.785653, 7.417334, -0.046858, -0.328459, -0.619027, 4.952254, -0.280194, -0.195900, -4.925470, 1.091309, 3.714402, -4.790899, -1.099012, 3.402861, -4.682663, 0.075650, -1.335872, -1.713712, 0.301721, 5.977397, 1.975923, 3.779584, 6.479292, -1.243348, -5.081936, -0.819842, -8.667365, 7.984306, 3.196889, -2.456238, -0.162036, 0.954915, 1.417101, 2.200590, -2.237679, -2.106289, -3.372339, -6.050962, 3.034973, -1.827862, -2.671412, 2.248384, 1.598400, 1.451663, 6.373374, -3.289393, -0.002666, 5.629801, -2.395743, -6.531272, 0.420969, 0.387590, -0.366343, -9.984303, -1.623720, -0.493820, -2.968705, 4.332264, -5.070169, 0.493697, 1.831998, -0.336312, -6.203096, -8.180768, -4.203964, -3.099047, 0.420540, -3.541414, -1.260756, -0.902897, -0.087048, 9.912076, 2.062619, 0.795598, -0.768905, -0.190682, -8.362232, 10.458933, -8.862805, -3.006865, 0.060958, 5.090002, -2.474933, 3.545698, -4.971230, 1.135828, -0.154151, -1.636367, 6.855558, -0.947844, -1.932342, -1.206949, 4.376014, -0.609596, 0.355875, 6.554003, -3.868812, 0.427834, 3.170920, -3.570000, -0.892403, 1.461200, 4.807756, -1.043838, 0.380617, 2.903239, 2.049362, -3.784583, 1.210863, 1.215841, 6.655245, 3.702009, 3.472306, -0.083865, 0.784953, -3.251274, 8.072958, 4.260328, -1.173335, -0.386510, -1.989874, 1.456163, 6.554066, -0.012034, -5.006885, -0.018063, 2.548033, -1.761526, 1.323905, -4.792915, -0.049102, 0.712940, -2.344241, 2.305605, -6.564734, 10.836132, 
-0.495553, 3.967372, 4.306948, 8.116026, 5.040866, -3.359933, -3.902645, -1.768089, 4.545218, 0.596399, -1.068539, -2.317191, -4.928998, -0.500831, 1.849990, 1.303427, 4.003483, 2.465532, -5.560050, -1.787768, 5.118622, 1.099808, 1.260572, -4.746913, -3.827788, -0.011982, -0.696365, 3.642304, 3.162141, 3.661181, -1.009650, 8.948164, 0.209512, -0.525730, 5.278910, -2.234629, -3.607345, -6.734541, -4.344431, -0.086174, -3.988655, -1.805668, 0.975225, 5.086757, -7.566240, -0.844781, 2.120935, 5.544988, 3.873322, 4.233092, 0.860084, -1.103033, 2.703806, 3.777308, 3.273662, 0.236095, 4.487761, 1.104045, -3.370062, 7.673077, 4.968377, 2.034759, -0.243290, -6.563126, -0.444486, 1.256436, 1.948936, 0.990776, -1.788437, 5.928935, -1.574325, 5.711511, -3.726691, -2.318228, -2.928472, 0.841987, 5.552748, 6.231050, -1.080486, -0.560032, 0.343875, 0.478820, 1.577655, -2.069352, 3.538757, 2.738144, 5.953899, 0.943720, -2.871507, 0.184926, -12.671627, 4.877713, 0.070981, 6.882595, -13.802091, 7.691815, -0.105061, 9.172771, -2.693112, -1.579502, 1.980792, -0.242440, 1.777880, 1.709190, 1.680155, -0.436484, -3.480121, 4.734554, 0.068733, -2.571400, -2.385751, -1.918840, -0.367167, -4.130077, 3.263525, -0.004105, -2.520358, -0.130756, -6.271007, -0.057947, -0.465396, -1.468961, -2.898905, -1.459889, 2.608495, 0.106310, 0.517743, 3.969159, -2.469547, 1.960915, 1.666106, 1.293651, -4.565251, -8.760086, 8.736089, -4.102224, 0.471973, 4.588937, 0.587683, 0.054630, -2.194522, -0.514576, -0.099143, 0.402664, 0.775015, -2.749141, 5.394089, -0.152197, 2.781313, 0.495082, 2.436580, -0.519279, 0.451019, 3.525740, 1.866027, -5.291616, 2.673368, -0.521104, -2.942356, -0.596059, 1.104811, -4.148195, -3.897232, -9.161897, 0.627730, -1.668349, -3.652925, -0.207861, 3.698321, -0.205109, 3.103262, -3.494669, -0.187104, 2.243187, -2.481222, 4.020870, 0.554720, -4.490155, -1.031324, 0.442238, -0.315441, 5.906981, -6.786347, -15.138546, 3.032733, 2.355456, -0.824983, -0.596062, -0.749711, -1.812249, 3.716589, 6.805763, -0.034538, -4.483662, 5.036940, -1.492470, -2.044480, -5.116144, -5.993679, 2.552830, -1.059420, -2.743044, 0.413458, 0.575297, -1.070580, -8.552012, -5.880134, 4.774290, -3.788603, 2.796700, -2.777897, -3.238499, -5.397844, -0.289818, -2.881069, 2.106607, -0.164478, -0.001615, -7.540624, 15.398337, 3.278969, -4.629666, -0.899672, 4.580218, 6.048995, 3.034004, 2.903438, -3.602129, 0.601856, -2.010599, -2.186819, -0.793332, 0.115452, -0.448470, -8.010987, 6.539059, 1.265843, -2.597739, 5.706591, 2.872288, -4.839345, 1.063444, -4.270168, -6.976844, 1.866566, 0.278143, 10.704450, -4.744617, 0.206667, 2.233127, 0.898024, -4.417085, -5.821124, 0.096072, 6.063853, 2.847756, 4.152411, 1.970534, 4.777279, 0.574907, 2.086689, -5.487464, -0.013441, -2.087293, -2.006710, 5.620429, 6.012352, -0.751556, 0.012052, -0.184387, 2.561278, -1.901040, -2.528442, -2.109064, -3.635317, 0.929790, -2.488345, 4.213092, -2.267895, 2.424357, -0.212941, 1.252959, -1.502855, 2.222781, -0.749264, -3.946250, -3.540442, -3.082764, -4.688244, 6.330848, -4.555147, -2.398614, 3.231541, -4.868418, 5.355044, 5.841464, 3.334554, -3.275609, -4.493990, 7.369086, 0.237450, -3.884218, -0.925154, 2.506452, 1.658967, 0.444476, 4.098067, 3.871061, -3.002503, 12.404830, -0.561153, -0.054302, -2.048875, -0.334486, 2.267861, 0.017389, -0.105509, -2.429669, -0.018714, -7.827611, 1.199149, -1.922626, -2.585815, -4.252509, -0.393018, -3.696688, 1.452739, 1.598485, 1.685126, -2.847807, 4.094100, -6.610662, -0.119588, 1.761243, -3.900064, 3.136038, 1.836982, 
-0.076858, 0.411214, -5.531932, 7.002223, -0.184232, -1.050833, 0.430106, -6.836293, -2.949204, -2.118420, -4.025091, -5.452693, -0.661648, -1.625802, 1.181498, 1.392349, 7.168321, -1.408562, 0.853828, -6.870427, 1.122499, 3.143206, -1.332579, -0.359509, -1.418879, 1.400915, 1.712554, 5.027317, -6.078128, 1.559343, 1.542283, -10.188326, -0.212963, -4.286996, 4.560884, -6.536518, -0.735523, 4.853716, -3.767861, 0.699214, 3.094204, 2.912627, -0.100957, 3.190971, -0.249455, -2.429652, 4.472758, 2.040844, -1.431998, 0.439897, 0.107661, -0.489407, 1.599565, 3.020194, -3.008865, 1.103213, 0.586688, -3.671905, 2.919884, -9.499748, 12.771971, 1.386624, 3.641678, -2.699918, -1.835520, 0.783594, -3.013034, 0.938061, -6.501561, -0.055753, 4.100601, 0.444875, -3.243652, 0.213727, -5.381481, 0.951961, -5.346110, -3.838861, 1.553692, 1.063085, -3.057676, 1.183920, -1.756806, -1.831886, -1.485562, -3.231427, 0.436206, -6.629155, -1.100268, -7.568878, 1.134432, -0.500045, -6.556386, -0.018642, 1.592560, 0.061444, 4.572968, -1.359127, -1.944338, 3.045600, -1.820060, 1.791443, 0.983944, -3.531764, 0.452376, 5.627474, -3.524567, -4.320384, -1.675994, -7.208580, -2.563125, 0.773399, -0.141520, 1.114766, -2.537768, 0.260443, 2.469838, 5.141861, 1.035009, 3.269918, -0.025757, -0.525366, 0.294970, -0.593817, -5.929312, 2.198774, 1.646645, -2.754306, -1.698543, -1.750308, 3.245319, -0.352226, 2.511466, -2.598152, -1.736168, 0.124788, -5.378657, 6.752583, 0.928031, -1.949350, -0.313992, -0.143224, -3.282874, 3.142558, -1.015842, -0.166384, -0.708272, -0.868943, 2.455537, 0.000454, -4.127312, 3.607216, 1.942774, 0.688502, -1.330716, 1.389135, 3.509871, 4.487970, -0.549951, 2.801546, 1.892340, -1.206898, -0.574205, 0.912621, 1.108271, 2.417092, -0.384985, 0.796672, 2.733675, 5.479104, -0.482097, 0.975837, 1.080657, 1.819036, 0.481724, -2.958295, 0.986961, -2.752667, 0.006310, -2.004480, -0.521186, 2.354844, 6.198243, 9.812953, -1.175043, -0.914251, -0.982123, 0.001049, 8.242318, -0.254467, -1.744546, 0.090742, 2.777944, -0.590434, -4.633141, 5.098579, -3.303007, 6.371385, -0.177810, -0.151930, -4.211636, -4.906734, 5.338553, -0.660589, 0.947276, 1.726016, 0.238001, -5.286612, -6.119817, 0.339379, 5.888375, -2.705546, -3.789344, 2.375430, 3.910210, -3.564741, 2.976503, 1.181920, -4.045178, 2.322616, 3.701419, 5.809314, 4.762037, 4.218190, -2.634000, 7.359211, 3.199809, 0.433280, -4.461869, 4.829636, -2.617466, -0.253128, -2.061197, 0.720782, 3.579184, 7.735759, -0.085115, -9.521240, 4.270761, 1.051151, -2.254254, 4.003664, -5.511807, -4.849020, -0.024963, -4.589090, -0.479216, 1.063627, 5.243871, 1.610727, 5.410498, 2.759169, 1.699760, 1.769351, -0.926055, -5.706288, -2.869562, 0.324755, 0.162224, -0.036747, -0.599387, 6.699827, 5.586567, -2.604620, 3.132823, -1.935177, 1.675359, -0.849052, 1.504656, 1.309443, 4.039851, 1.260914, -0.572112, -2.603785, 0.819509, 0.044418, 0.977641, -0.276655, 0.001733, -2.838774, 6.379413, -8.539611, -5.025818, 3.382762, -4.050978, -3.848169, -0.160513, -3.040981, 4.147115, 1.921999, 0.517118, -0.379783, -1.113611, -0.519426, -5.070415, 3.399350, -2.266479, -1.011812, 7.110214, 0.668227, 1.047322, 4.506050, 2.719533, 1.924665, 2.321382, 0.830635, 6.062304, 1.063204, -0.916343, 1.234224, 3.562131, -4.437900, -6.819008, -7.978900, 2.384413, 1.588225, -3.830861, -0.111517, 6.977466, -1.371589, 4.084367, 2.448900, 5.863358, -3.097861, -5.413271, 4.642662, -1.094156, -1.340076, -0.159233, 2.569523, 0.927021, -0.889272, -1.676083, -1.067212, 1.326139, 5.071946, 5.758622, 4.551999, 3.346840, 
-6.416158, 5.248587, 0.974993, -4.547678, -2.343253, 0.923081, -0.823665, -2.823153, 2.116376, 0.000125, -0.545516, 7.003466, 0.407590, 1.891820, 1.841134, 2.885477, -2.568388, 3.416747, -3.895938, -1.098560, -4.670901, 7.180565, 1.383140, 0.144079, 0.250928, -3.730693, 2.393953, 1.434225, -4.413769, -0.597606, -2.124769, -3.904779, -0.204972, 0.595127, -0.302229, -2.681247, -0.453304, -0.124337, 1.118614, -1.132095, -0.010216, 6.624914, 0.619006, -4.529122, -1.053771, -0.741000, -0.302455, 4.345139, -1.644648, -5.829570, 2.616752, 4.953855, -4.260857, -5.226663, -6.016680, -3.423861, -0.427386, -0.650108, 1.438002, 7.219870, 15.597327, 1.648259, -4.915005, -2.176121, 1.252684, -8.067482, 1.004887, 2.234263, 5.244569, 1.403449, 0.251684, -1.603438, -4.736930, 2.207020, 0.487028, 1.785503, -9.469984, -1.735166, -3.076313, -0.084734, 6.094162, -2.243753, 3.188770, 0.752161, -2.115803, -3.962299, 5.416203, -0.519325, 2.841952, 1.403736, -5.291543, -1.998298, 1.161631, -0.728690, 3.158396, -8.010592, -1.127579, 1.470116, -4.080388, -1.537599, 0.555249, 0.032825, -4.652673, 4.491270, -0.002560, 0.100157, -1.179998, -1.025526, 0.051904, 11.325600, -1.089780, 1.430256, -1.505951, 2.849197, 1.997859, 1.951916, 0.050783, 4.551379, 3.448592, -0.477766, -1.388904, -3.126778, 2.843332, 3.972567, -1.330226, 7.247967, 2.097585, 4.222962, 0.915490, -0.481500, 1.488296, -0.242089, -4.644547, -0.521147, -4.184572, -4.921031, -0.104900, 1.947062, 5.209659, 4.575287, 2.997641, -0.902476, -7.953763, -2.229190, -0.592449, -1.466445, -6.111979, 0.791697, 2.953671, 0.242007, -2.118891, 1.598533, -2.975007, -8.495104, -0.409968, 1.095940, 5.209512, 1.841356, 4.131776, 2.690130, -2.381924, -2.828803, 0.425064, 2.708450, -2.199247, 0.442123, 0.961749, 4.514327, -0.857212, 0.078217, -7.150231, -4.997750, 0.265880, 0.126052, 3.540422, 3.314131, -2.644301, 0.383521, 0.471140, -1.247263, -0.154060, 0.464139, -0.329758, 0.577399, 3.095126, -0.921622, -0.002884, 6.549686, -8.521924, -6.662467, 4.260942, -3.476185, -1.468084, -6.702409, -5.054902, -1.544958, -0.488450, -0.359168, -3.452594, -3.125045, 0.018224, 1.207793, 1.758054, 11.474294, -8.620993, 1.953140, -0.891067, 0.855473, 0.894033, -1.386394, -1.787122, 4.055862, 1.841960, -0.046877, 1.770575, 0.254900, -0.204991, -6.284609, -0.210361, 0.225045, 5.821990, -5.078853, 5.172901, -1.057866, 5.414890, -0.296372, -0.323642, -1.942555, -1.782322, -0.142216, 2.361175, 5.863658, 1.255091, 0.113655, -1.761485, 2.057706, -4.621158, -1.826589, 3.453382, -0.796955, 4.298389, 1.309121, 0.652609, 0.609930, 0.786433, -0.605716, 0.437038, -0.250982, -0.070207, -5.990405, -6.226016, -3.236530, -10.948854, 0.253946, 0.778204, -10.692794, 0.139799, -6.137545, 4.335771, -1.723912, 3.998691, 3.596651, -5.966559, -0.767783, -5.661316, -1.714273, -1.192031, 0.434605, 2.608742, 4.364273, -1.155325, -1.330185, 3.341307, 0.134348, 4.718712, -2.172134, 1.181792, -0.085315, 4.335872, -6.531881, 5.408314, 0.733250, -1.875060, -9.689086, 3.015121, -3.059316, 1.736107, 3.458842, 1.196520, -6.385832, -8.142164, 2.427735, 3.408198, -0.353162, -5.189793, 11.118150, 0.474814, -1.520812, -1.571485, -2.071823, -0.662163, 8.865157, 3.969018, 2.120155, -3.203269, 0.154398, -0.445203, -1.775447, 2.848330, -9.318402, -3.611183, 0.622507, -4.779537, 0.814549, -0.207286, -0.250177, -1.948637, 3.053443, -6.163026, 0.669252, 5.747634, 3.574313, -7.725072, 3.276859, 12.967279, 1.460282, 0.103523, -1.440002, 3.095255, 5.263038, -0.277557, -4.912306, 4.413857, 0.448069, 6.697745, 0.037776, -1.275408, 
1.980901, -4.507404, 1.571898, 0.028515, 3.551959, -3.258892, 0.196952, 0.865760, 1.771985, -0.560924, 0.149171, -0.590992, 0.250892, -12.705998, 3.472564, 1.871170, 3.777988, -2.231085, -0.200798, 2.148047, 4.741966, 1.799010, 2.405207, -5.309678, -0.116218, -1.683699, 1.348477, 1.605752, 0.688772, 3.241073, -0.059511, -0.777637, -6.825166, -7.449450, 0.000307, -4.844162, 4.551309, 0.244788, -1.791866, -5.255681, -4.330151, -0.454040, 1.124615, 1.945446, 3.886403, 1.820207, 0.098987, -3.946821, -1.203425, -1.235008, 5.151693, -0.114844, 0.723914, 2.628422, 0.004160, -1.184926, 0.814486, 0.016687, 5.080126, -0.401210, -8.082789, 3.965074, -3.200977, -0.490179, -5.809491, 1.428539, 1.924792, 4.635196, 4.836840, -0.097732, -0.966319, -3.553488, 7.078143, 3.361048, -0.808940, -2.389795, -3.335257, -0.024080, -0.788780, 3.495562, -5.971897, -2.452428, -0.514659, 4.649741, 0.305733, 2.462263, -1.121514, -6.612349, 3.638024, 2.552860, -0.314396, 5.796158, -4.216064, 2.694710, -2.803408, 2.020118, -0.616314, 0.705488, -2.978780, -1.365438, -4.841853, -0.005801, 2.348718, 4.094787, -3.933133, -0.282258, -0.190431, -2.611558, -0.889462, -3.617303, -3.399031, -2.807600, 7.340544, -7.016870, 0.865675, 4.735157, 6.240104, -0.805582, 3.563448, 0.245296, -1.364166, 0.065088, 1.218986, 1.581612, -10.317154, 2.828020, 0.631281, 5.292290, -1.012066, 3.295135, -2.118221, -4.065128, -2.025720, -0.542885, 4.864784, 0.972539, 2.409106, -3.026566, 0.052834, -0.635978, -1.371673, 4.564446, 0.266019, 2.473337, -0.515108, -3.199785, 1.433381, -5.369630, 5.401772, -1.989402, -6.665758, 1.262215, 2.464196, 0.130658, 0.604949, -2.326622, 1.560050, -7.688268, -0.869894, 1.476173, -6.076809, -5.910969, -0.345719, -0.468494, -2.862985, 1.228409, -4.402172, 4.530817, 2.801656, 3.120594, 1.146520, 0.227465, -8.773477, -5.237466, -2.359005, -1.473632, -5.939785, -2.033813, -0.396641, -0.916695, 3.696210, 5.658598, 1.240551, -5.646675, 0.040788, -3.990147, 0.487286, 3.847026, -7.868302, 6.422363, -3.844559, 1.576421, -0.699903, -3.094056, -2.831705, -3.381935, 4.725066, -1.884259, 0.691949, -5.523872, -1.687171, 3.662633, -4.199095, 9.477685, 2.940517, -1.661136, 0.400198, 0.940718, -2.082354, 0.595138, -0.009062, 1.398316, 3.604544, -0.527214, 1.110491, 1.020108, 1.789007, -0.024640, 0.086730, -2.491491, -0.620270, -0.389086, 1.234780, 3.212897, 4.291111, -7.174760, 1.364712, -1.673538, 4.086433, 4.412915, -0.472308, -0.473228, -4.162614, -0.027960, 3.082211, 2.567189, -2.216553, 1.678050, -0.811290, 7.225561, -7.684853, -4.117867, 0.079860, 12.188529, -5.439374, -13.353810, 5.090911, -2.569392, -0.743952, -2.366469, 3.063961, -1.482493, -1.334927, -1.243435, 0.264588, -1.085266, 1.297557, 0.001704, -4.485649, -9.474123, 1.733926, -4.903178, 5.447418, -6.455180, 0.103101, -2.083845, -0.602263, 3.238214, -2.716414, -4.906877, 6.643152, 2.998199, -7.056654, -1.015258, 11.574869, 6.554796, 3.362308, -0.003733, 3.453950, 4.718795, -6.079797, -1.753457, 5.108498, 4.184959, -8.387860, 0.146388, -1.377552, -3.198732, 2.336383, 2.003642, -5.602957, 1.347418, -2.164727, 0.054452, 3.491766, 5.223670, 4.382021, 1.120073, -1.240652, -3.448155, 1.442902, -2.941849, -5.820024, -2.045847, 5.518284, 5.681960, -4.484001, 2.556159, -1.104272, -1.046558, -6.567324, 0.517567, 2.940260, 6.427362, -11.458510, -0.866187, -2.997734, -2.455468, -9.320253, -3.303707, 4.060272, -0.491199, -0.002366, -3.162255, 0.413209, -2.830812, -4.762064, -0.691013, -7.443161, -0.288066, 1.381157, 1.668391, -4.430522, 3.060613, 2.616444, 1.778201, -3.044982, 
0.419092, -0.147936, -2.457551, -0.010444, 5.700674, 1.449705, -0.612144, 5.032084, -1.227064, 0.391349, 0.632198, -1.449035, -1.846903, -2.415545, -0.925181, 0.355926, -0.004332, 1.707547, -2.555514, 3.100350, 0.435743, 3.611297, -9.147227, -0.585761, 0.000278, -5.016597, -2.456300, -4.970089, 5.335125, 4.627530, -3.874933, 7.308241, -3.422642, 1.344155, 2.944600, 1.775678, 3.276104, 4.470214, 1.143845, -3.917175, -3.946400, -7.490934, -5.150952, -7.968368, -2.467618, -3.433546, -5.056820, -4.297731, 4.742975, -1.664365, 0.814348, -0.859673, 2.978123, 0.796562, 2.054491, 5.765913, -0.559808, -1.051795, -1.242198, 0.000221, -7.089042, 0.955357, 0.492572, -5.722262, -1.797783, -2.726589, -7.658901, -4.940908, 0.260004, -6.280422, -18.631365, -3.907656, -1.436970, 10.147831, -0.435030, 0.003930, -2.694990, 2.173045, -1.424667, -4.551793, 1.137839, 6.133080, -0.888418, 0.230520, -0.462355, -5.107423, -0.172269, 3.012013, -1.792457, -7.396113, -10.149408, -3.544129, 1.809829, -1.602990, -2.458670, -0.162557, -1.609908, -1.097731, 2.478265, 7.415278, -3.981320, -2.315916, 0.037548, -1.522557, -4.473760, 1.500247, 4.571434, 0.270541, -0.609279, -1.764493, -1.349163, -0.258172, -3.637425, 0.664170, 2.329094, -2.407710, -1.277284, 0.485533, -5.162691, 1.833838, -2.899857, -3.730976, -3.460948, -0.315993, -0.293317, 1.998160, -2.033452, 0.487131, 0.032926, 0.747196, 4.834600, -2.490738, -2.136173, 0.755432, -0.387367, 4.713502, 5.727706, -6.008123, -1.055226, 6.219704, 0.762673, -1.237593, -3.119219, -1.008921, 5.554079, 2.500906, 9.379539, -2.600659, -3.781100, -1.074150, 4.647661, 6.463739, -2.109208, 9.602910, -1.425759, 2.519351, -4.868101, -0.350469, -8.104247, -0.249852, -2.087500, 2.515036, -3.762993, 1.595349, 0.240776, -0.269171, 2.302707, -2.399481, 2.446138, 7.067205, -0.013742, -0.813670, 4.315806, 0.030355, 5.580543, -8.178206, 2.999027, 0.817311, 2.595429, -0.875011, -0.639973, -0.032789, -0.118570, -5.313186, -0.003777, -4.859177, 0.960610, -7.806963, -1.103608, 3.409426, 9.830523, 3.076019, 0.024818, -2.503727, -4.450749, 0.053584, 0.850095, 5.326097, 3.058037, 8.592230, 2.096411, -0.089672, 0.955738, 0.042149, -4.880162, 3.721464, 4.908263, -1.920460, -4.517033, 0.372884, -3.512476, 0.056348, 3.107348, 2.402527, 0.064225, 1.185405, 1.341910, -4.253679, 2.861337, -4.010659, 0.894487, -7.247368, -0.000893, 6.156673, 5.788464, -5.955809, 4.305521, 0.638483, -0.074308, -1.715720, 2.665249, 6.174049, 4.274242, -4.937733, -0.412310, -0.108476, 3.413505, 7.387181, -4.008705, 4.089046, -1.684506, 2.650360, 3.552774, 6.270626, 4.812716, 3.726841, 0.014254, -10.351541, -3.054636, -1.097221, 0.683964, -7.121925, -2.973965, 1.295165, 5.215690, -2.879241, 1.928433, -1.083969, 1.457107, 1.463645, 0.582464, 5.576221, 2.434537, 8.944540, -1.615804, 0.004095, -5.838607, 0.439014, -0.531610, -1.225080, -7.056833, 8.580327, 0.184839, 2.447998, 0.026272, 2.072340, 0.294159, -2.453215, -1.829973, 3.516114, 1.376701, -0.160926, -0.171540, -1.492921, 0.175216, -0.456417, 0.901945, 0.264705, 7.141057, 0.385551, -1.716259, -0.217394, 4.732449, 12.858533, -4.797349, 3.022484, -6.847541, 2.264313, 8.327429, 0.708435, 0.298350, 1.339623, 6.857325, 6.090995, -0.711907, -5.317862, -9.006277, 0.719134, 1.778589, -0.830922, 4.213965, 2.385133, 0.574354, 1.494178, 5.651868, 5.762273, -3.656021, 0.317673, 6.350677, 3.327611, 0.488446, -4.551743, 1.621502, 8.023821, 3.609228, 4.257627, -5.616302, -3.366006, 2.346124, -5.134101, -0.002437, 5.090351, 4.677687, 7.520842, 1.236903, 1.379966, 4.049139, 4.854925, 
0.946248, -6.242638, 0.882017, 2.039562, 2.815969, -0.493101, 0.604626, 6.695972, -1.152543, -0.708954, 1.517877, -6.164851, -0.770286, -0.880754, -2.857412, -4.605688, -0.752854, 10.542234, -0.767521, 6.311722, -0.648967, 2.822311, 2.424105, -1.383784, 0.655183, -4.353927, 0.322624, -2.503654, -6.697694, -2.323251, 3.157937, 6.174052, -3.991862, 2.337104, 1.841762, -0.021823, -0.233497, 0.735497, -0.460269, 2.901000, -1.948082, 3.362817, -1.078811, -5.830395, -1.126911, -0.696781, 0.659697, -5.774518, 3.026677, -0.624733, -1.963984, 2.982625, 5.318667, -0.261951, -1.269010, -6.184422, 2.771906, 1.410886, -5.694937, -0.238314, -5.319174, 0.858094, -0.557519, 2.270931, -4.157010, 0.163534, -1.729811, -0.687096, -0.909277, 2.113277, 1.774920, 0.259986, 4.051906, 4.358549, -3.823084, 2.866276, 0.079413, -3.282643, 3.602583, -4.034286, -0.170759, -5.191716, -0.303129, 2.263378, 1.105465, -1.228492, 4.122843, -3.425029, -1.110700, 2.859209, -1.517243, -0.133670, -1.787520, -1.117020, -0.004118, -1.749458, -5.184211, -0.632913, 7.116434, -0.820585, -0.538100, -3.736050, 2.306218, -0.580903, 3.041562, -4.793915, 0.116510, -3.204783, 2.421368, 0.621671, -2.298523, 5.574130, 5.156301, -3.895345, -3.190936, 0.003904, -0.210380, 0.075178, -1.415915, 0.314303, 3.545636, 10.452524, 0.308431, 5.393156, 4.611145, -4.795026, 0.551152, -1.332147, -1.533049, 2.724701, 0.112138, 1.887591, -2.578153, 3.055818, -1.204668, -6.207092, 8.238597, 3.454141, -0.298537, -0.257892, 2.499728, -5.361709, 2.103728, -5.295871, 1.903355, 0.821097, -0.805249, 6.269466, 3.446429, -5.973318, -0.456753, 3.775240, 1.753724, 1.544705, -0.208631, -0.777346, 6.902713, 7.015963, 0.256568, 4.413515, 1.707932, 1.211990, 1.453060, -7.813718, 3.825574, 2.398544, -3.869414, -8.830361, -0.757796, 2.810469, 2.159734, 3.887664, 2.531574, -2.587343, 1.617573, 0.076285, 0.051549, -2.432191, -1.467406, 1.799585, -3.214180, -0.500947, -3.353161, 1.975000, 1.278472, -7.418863, 0.423090, 3.235873, 0.190312, 1.524740, -2.108716, -5.064704, -4.360610, 5.022361, 1.196381, -5.321489, -3.849074, -6.288360, 7.042095, -5.922540, 0.124113, 0.051638, -4.776923, -2.694787, 1.072526, 7.262791, 1.337453, 10.854486, -3.223768, -0.208705, 0.061224, -5.771092, -0.804541, -0.247919, 0.028130, -2.310637, 2.403315, 5.275797, 0.425248, -5.614018, -5.150809, -2.569344, 1.329278, 0.219058, -1.885726, 2.963136, 0.048093, -6.058012, 4.562561, -4.271072, 2.411485, 0.705157, -1.955973, 0.538758, 7.200862, 0.053691, -0.210938, 1.173432, 1.767322, 3.446575, -0.848888, 6.439497, -1.957364, 8.496014, 2.615993, 4.978897, -3.701126, 0.170450, -7.880435, -8.990486, 0.072872, 3.506958, -3.049961, 2.674550, -6.052963, -0.016590, -2.574883, -4.770270, 0.641006, 3.878675, -0.214942, -3.022496, -6.509954, 0.341253, -4.474951, 4.937757, 5.405204, -0.436071, -0.320585, 5.818399, 6.053077, 0.055024, 0.562246, 0.725039, 6.056796, -1.266948, 3.974546, -2.844677, 1.642181, -9.835558, -2.177721, 0.534739, 3.668169, 4.143951, 3.907747, 3.342772, -1.829029, -6.998097, -2.389975, 4.655732, 1.665773, 7.681024, 2.680847, 2.060605, 0.736731, -0.249828, 2.125784, 4.666676, -5.821370, -1.151837, -1.473667, -0.071509, -2.102807, -0.567075, 5.630590, -4.928468, -3.802998, 7.103188, 2.726189, -4.715731, -0.679007, 4.316378, 5.122252, 7.441735, -1.690517, -5.982613, -0.995818, 1.863775, 1.266526, -0.130073, 2.868453, -5.159033, 0.765051, -5.445976, 0.533186, -1.652635, -2.958902, -0.073641, 2.516429, -0.351381, 3.268773, -3.999427, -1.914936, 1.746080, 1.726612, 2.887558, -7.495602, -0.076782, 
-5.088868, 0.572199, 2.840711, 0.165677, 0.951421, -4.288205, -11.502398, -1.787641, -0.041416, 1.713929, 5.168766, -0.560446, -1.781411, -0.374694, -1.100172, 1.671594, -0.771530, 0.498905, -1.402435, 2.443130, 0.247629, 0.622886, -1.939705, -1.199916, -7.890548, 1.651277, 1.242517, 0.214661, 1.535269, -0.349612, -5.824280, 2.717916, -1.162352, -2.670184, 2.235982, 3.795414, 6.073619, 4.371194, -2.397419, 3.506198, 1.103271, 3.745935, -7.047450, 0.532427, -1.319007, 2.419747, 2.983305, -1.508792, -0.120573, -0.004616, -1.492574, -2.901405, -4.248282, -6.944168, 2.497512, 4.479490, -2.438242, 0.661082, -0.368722, -6.450502, 0.273977, 2.558973, 1.739906, 4.925027, -1.353585, 2.908525, 2.438493, -4.168282, -4.286761, -2.646605, 5.640347, -3.663446, 0.178413, 1.071040, 4.274489, -0.975368, -0.056342, 0.074378, 3.925021, -5.058712, -2.649401, 6.320730, 0.303362, -1.669372, 0.736516, -5.612288, 0.004627, -6.118943, -4.376561, -5.431967, -0.362653],
+	"starcoder:latest":     [3.403105, -0.885765, -0.083649, -0.515395, -1.043502, -1.477790, -0.695208, -3.574679, 0.565663, 1.371975, -1.056780, 1.216602, 1.283216, -1.958889, -0.857253, -1.728689, 0.492718, 1.702949, 1.719984, 0.119930, 0.494645, -2.054816, -1.626791, 0.552477, 0.914593, -0.485891, -0.929655, 0.832316, -2.315520, 0.547917, -0.765224, 0.153330, -0.042735, 0.145564, 1.248368, -1.530666, -1.399585, 0.352929, -2.051843, 1.968481, 1.520983, 1.547979, 1.193255, -0.237402, -1.942155, -2.296346, 2.379162, -1.602307, 3.292728, 0.407927, -0.696908, 1.446436, 2.481944, -1.326123, 0.340346, 1.648242, 0.607029, 1.106172, -0.079276, 1.179846, 1.420314, -1.833898, -0.382987, 0.208215, -0.539100, -0.346188, -0.015549, -2.717581, -1.565658, 0.294800, -0.982709, 0.755109, 3.554029, 0.325654, -1.445190, 1.074547, 2.289493, -2.029409, 0.729513, 1.288408, -0.126420, 0.425873, -0.166246, 0.503419, 1.323817, -3.467199, -2.646384, -2.708485, -1.055008, -2.570379, 2.377908, 1.696884, -4.983853, -0.891830, 1.252788, 0.375939, 0.323120, -1.784853, -0.381588, -0.275369, -0.837526, 0.328304, 1.629018, -0.889208, -0.358851, -0.869274, 0.932910, -1.347002, 0.216807, -1.067873, -0.689641, -0.425752, 0.580481, 2.593486, 1.537688, 0.365340, 0.532069, 0.626001, -1.530973, -2.397178, 1.007982, 1.205497, -0.429468, -0.331908, -0.456599, -0.819368, -1.969552, 3.051803, -0.362822, 1.180519, -2.426223, -1.355583, 1.801512, -3.590297, -1.407512, 0.225204, 0.694478, 1.715964, 0.953788, -0.149717, -1.663969, 2.499240, -1.394514, 1.897290, 1.178160, 0.097654, -2.142154, -0.422565, -5.164824, -1.461911, 1.930484, 0.248862, 0.326992, -18.250940, -0.654411, 0.761512, -1.188235, 0.099793, -9.864193, 7.373234, -1.453363, -0.995029, -0.032538, 0.459874, 0.949269, 3.383448, -1.526219, 0.481047, -0.425621, 1.067186, 1.081501, -2.134348, 1.094220, -0.120503, -2.301070, 1.271164, -2.035030, 1.213219, 0.657726, -1.705736, 3.684097, -2.503601, 0.632831, -1.559634, -0.002673, -2.948208, -2.696636, 0.449597, -0.909573, -0.759229, 0.260589, 1.520208, -0.851524, 0.087855, -1.957812, 1.004672, 0.248923, 3.312503, 1.961505, 0.906963, 0.595246, 1.654317, -0.600351, -4.145365, 2.747739, 1.764888, 0.480274, -0.265173, -0.882353, -1.578600, -0.573896, 2.892225, 0.688005, 0.580525, -1.248558, 1.149763, -2.220257, -1.262100, -1.361701, 0.243109, 2.385012, -1.720015, 1.022053, -0.682021, 0.414539, -1.402510, 1.598152, 3.237121, 1.387014, -2.603572, 0.843757, 0.307507, 0.537373, 2.153689, -0.138717, 2.065826, 2.999563, 0.701097, 2.253418, -0.570769, 0.312188, 1.284726, -0.284251, -0.051379, -2.217319, 1.398044, 1.745042, 0.186080, -0.839044, -0.303346, 0.606337, -3.448408, 1.731226, 0.870225, 0.542273, -2.155460, 0.334702, -0.275068, 1.966276, -0.384633, 1.741714, 0.608612, 0.487226, 0.785482, -1.650026, -0.924841, -0.638561, -1.453186, -0.953706, 0.498500, -0.801929, -2.049580, -0.374899, -0.400653, -3.826706, 2.235883, 1.789586, -2.078139, -0.928475, 1.301903, -1.350180, -2.056282, -0.748132, 2.414192, -3.171230, -0.269716, -0.264180, 1.101787, 0.376517, -0.858027, -0.751998, -0.988863, -1.460158, 0.065839, 2.376490, 0.568797, -1.039343, -4.346259, -1.290150, 0.131418, 0.069526, -0.831571, -0.826303, -1.744774, -2.419927, -2.815735, -0.413141, -0.384882, -0.835495, -1.659277, 0.653935, 0.150067, 0.346518, 0.612567, -1.113423, 0.135259, -0.163968, -1.602211, -0.661846, 0.099561, 2.117822, -1.855378, -3.213007, -0.001265, 0.357234, -1.260347, -1.437309, -1.076286, -0.276653, -1.644431, -0.385764, -0.074708, 1.841366, 0.864748, 0.391535, 
-1.602082, 1.233709, -0.303617, -1.689856, 1.423543, 1.232455, -0.144236, -1.455944, -2.345803, -1.453905, 0.357781, 0.418199, -0.703023, -0.868598, 0.796561, 2.178256, -0.090044, -0.434035, 0.021456, -0.020562, 0.059337, -1.527410, -0.010944, -1.772391, -3.496375, -0.837363, -2.433579, 0.619973, 0.045446, 0.652892, 2.023233, -2.406516, 1.417269, -0.034923, -0.076643, -1.172168, -2.982738, 1.452945, 1.058633, 1.382744, 0.848724, 1.418435, 0.443789, -2.663935, 1.315363, -1.585118, 2.505541, 0.981825, -1.224547, 2.369899, -0.968944, 0.988074, 1.748182, -1.508594, -1.675557, 0.455838, -1.370594, -1.239768, 0.199479, 1.319536, 1.112748, -0.125195, -0.939147, 0.706842, 1.091995, -0.170540, 0.543556, 0.774294, -0.152735, 2.050831, -0.514986, 1.178088, -0.480602, 2.236464, 0.233113, -0.234995, -0.099754, -0.641682, 0.720272, -1.274108, -0.343999, 0.778661, -0.077611, 1.104150, -0.271160, 2.553004, 0.887008, -1.080819, 0.928667, 0.713697, 0.786556, -0.861365, -2.288996, -0.585177, -1.993488, -0.426313, -2.300396, -1.196838, -0.950078, 0.357648, -2.617935, 0.566233, 0.135590, 3.430223, -0.185004, -0.924127, -3.156829, -0.663617, -0.921790, 1.351865, 1.895824, -1.705446, -0.587528, -1.112015, 1.855345, -2.307836, -1.210345, 0.671711, -1.614769, 0.037545, 0.351032, 0.898224, 0.659482, 0.740753, 2.317791, -0.738380, -0.465833, -0.489376, -0.555237, -0.076919, 1.755440, 0.075913, 0.426051, -0.117439, -0.692971, 0.205078, -0.242168, 0.529022, 0.576028, -1.798912, 0.482144, 1.288307, 0.295549, -0.854719, 0.781398, 0.616750, 0.973558, -0.965475, -0.746755, -0.661815, 0.252199, -1.844748, -0.616564, -2.413130, 3.795521, -2.212642, 0.363001, 1.099099, 0.432490, 1.544763, 0.077443, -0.128217, -0.267324, -4.240766, 0.595412, -0.285840, 2.883118, 0.083648, -0.773143, 0.828623, -2.842643, 0.941287, 0.980759, 3.152416, 1.180029, 1.177530, 0.005636, -0.574758, -0.544597, 0.332312, 0.409605, 4.502332, -0.585149, 0.432413, 2.717876, 3.941690, -0.025800, 0.670812, -0.115604, 2.169778, 1.022263, -0.531681, 1.383107, 0.884408, 1.822171, -0.609354, 0.606749, 0.647798, -1.053839, -2.381397, 3.332797, -1.086664, -1.448608, -1.387584, -3.250617, 1.918070, -0.546604, 0.788303, 0.922701, 1.070095, 1.329838, 1.643232, 0.081183, -1.996287, -0.549173, -2.555878, -0.277837, -0.651010, 0.242560, 0.613469, 0.061182, 0.985218, 0.650243, 0.457274, 0.106429, -0.707542, 0.648733, -1.470251, -0.196635, -0.896454, -0.657666, -0.848689, 0.015037, 0.621408, 1.830077, -0.043118, -0.799176, -0.866995, 1.279791, 2.239098, 0.482312, 0.855783, -0.030351, 2.994683, -1.111507, -0.678747, 1.296768, -1.387152, 0.506191, -3.240442, -0.491302, 1.143967, 0.451711, -6.848300, 2.822928, 1.094856, -0.524747, -1.055822, 0.692936, 0.570139, -0.132942, 1.337316, -0.069503, -0.370510, -2.173159, -0.146296, -1.528087, 0.801579, -2.051918, -1.570595, 1.266944, -0.711103, 0.507582, -1.092141, 0.443071, -0.626451, -2.428448, 0.029222, 0.103925, -2.523681, 0.274318, -0.338442, 2.418235, -0.047461, 0.384895, -0.688121, 4.450501, 1.491218, 0.054557, -1.449451, -1.366343, 3.092702, 2.741473, -2.091660, 0.849361, 0.794003, -0.273619, -0.436321, -0.395436, -1.547231, -0.204541, -0.224338, -0.125708, 3.619517, 0.512341, 0.580925, 1.772129, -0.749979, -0.245782, 0.117082, 0.798920, 1.260713, 2.256417, 2.782530, -1.792616, 0.125660, 0.319170, 2.403524, 0.868421, 2.345238, 1.822214, 0.305834, 3.414123, -0.678114, -1.243389, -0.064596, -1.110805, -2.301048, 0.114277, 0.822538, -3.369426, 1.348845, -1.608047, 1.304970, 1.106858, 0.178412, -1.399775, -0.868408, -2.612700, 
-0.089507, -2.743819, 3.032486, 0.680813, -1.610402, 0.519157, -2.944211, -1.901170, 0.255229, -0.908480, -1.014183, 0.829586, 1.283460, -0.866477, 2.368938, -0.183073, -0.442842, 1.119585, -0.065673, -1.281052, -1.533420, 1.392235, 0.678078, -1.309221, -0.098553, -0.052573, 0.773775, 0.577807, 0.344661, -0.071810, 0.470932, -1.389364, 0.231375, 2.169107, -0.111148, -0.471375, 1.468163, 0.178234, 1.000863, -2.930396, 1.217327, 0.414857, 2.301280, 1.279193, 1.502020, 0.559959, -1.898672, 0.438860, -0.172736, 2.016475, 1.250178, 2.439908, 2.356597, 2.346795, -0.119561, -1.736535, -0.866320, -2.269344, -0.380247, 1.721837, 1.208060, -0.463667, -0.524570, 2.156525, 1.016336, 0.508051, 3.164112, 0.912623, -0.786937, 0.548691, -2.175578, -0.852418, 0.219264, -2.745629, 0.650010, 2.302742, -0.086833, 3.083979, -3.291409, -1.325565, -1.600101, 1.316101, 1.175194, 1.304705, -0.124973, 1.714000, 1.252908, -0.339423, 0.095515, 0.552147, 1.563889, 1.536479, -1.135800, -0.686791, 2.245939, -2.134178, 1.441871, 0.479644, -1.088613, -1.457490, 0.522330, -1.838494, 1.388874, -1.309874, 1.488708, 0.234144, 0.326873, 1.123322, -1.523686, -1.762303, 0.609608, 2.648861, -0.749516, 1.096308, -0.262848, 0.325710, -1.108091, -0.256752, 1.585081, -0.217921, -2.225226, -0.133899, -0.643038, -2.835729, -2.558570, -0.478151, -2.211241, 0.775718, -0.377337, -0.560052, 0.675511, -2.358796, 3.555706, -1.314640, 2.506774, -2.862707, -0.822494, 0.071026, -1.813606, -0.130201, -0.744126, -0.794054, -1.811334, 2.161353, 0.782546, -2.065440, -1.447563, 1.012326, -0.392525, 0.527692, -0.290882, -0.542280, 0.107965, 0.157866, -1.004487, 1.608502, -0.145668, -1.833334, -0.597097, 1.601349, 0.678382, -0.471299, -0.315880, 1.274011, 0.329983, -1.887083, 0.858173, -1.486972, 0.558593, -0.882209, -2.203498, 0.314512, 1.715837, -0.452137, -0.321170, -1.059508, 1.295388, 2.400156, 2.810319, 0.236921, -0.275691, -1.082528, 0.285290, 2.199128, 1.461259, 0.158661, -0.589804, -0.392554, 0.874221, -1.919368, -0.224965, 0.284844, -0.329165, 2.215252, 0.987173, 0.243996, 0.424165, -2.677810, -0.070846, -2.430132, 0.265424, -0.570085, -3.599534, -1.846077, -1.021166, 2.079120, 14.761611, -0.101933, 2.491434, 1.321804, -1.042176, -1.507433, -0.556304, 0.703751, 0.806636, 1.063058, 0.167255, -2.111855, -2.038255, 0.465825, -0.841183, -0.613164, -1.908800, 1.911980, 2.629885, 0.182362, -0.442470, 0.317966, -0.657572, -0.091976, 2.060539, -0.683898, 0.389377, 1.539390, -0.091685, -0.635328, 2.292643, 0.427552, 1.229509, -1.775795, -0.231926, 1.510789, -1.516075, 0.870915, 0.042147, -1.703443, 0.526075, -0.166702, -0.084150, 0.969713, -1.242767, 1.524845, 2.368418, 0.717042, -2.309449, -0.519731, -2.674847, 2.590868, 0.267367, 4.017840, -3.121681, 1.361615, -0.073771, 3.837650, -0.774441, 2.262183, 1.314109, -2.943380, 1.136152, -2.458218, 1.462981, 0.149853, -0.912561, -0.318832, 2.494370, 0.001506, -1.153213, 0.118310, -0.153211, -1.532233, -0.316073, 0.838619, 1.777950, -0.811849, 1.098278, 0.180579, 0.382676, -0.647900, -0.095141, -0.754732, -1.420778, -0.598671, -1.636378, -0.612884, -0.054185, -0.664659, 1.915045, -1.596123, 0.572644, 0.998049, -1.145053, 0.844229, 1.177464, 0.204193, 1.460010, -1.551229, 2.335606, -1.694714, -2.199979, 1.438900, 2.572799, 2.600366, -1.752213, -0.631761, 2.174498, -0.039297, 1.524690, 1.161811, 1.917906, -0.093180, 0.791555, 1.681564, 1.816557, 0.669684, -1.414118, -1.395584, -0.204529, 1.551600, -0.932364, 1.058350, 1.755896, 1.802606, 0.304214, -1.856664, 1.476071, 1.832048, 1.047213, 0.353127, 1.149176, 
0.195622, 0.887775, -0.498183, 0.464600, 2.910213, -2.055765, 3.871612, -1.663870, 0.413334, 0.724341, -1.063226, -1.452561, 2.880621, 2.848557, -0.057286, 1.842694, -0.870889, -0.031014, 0.577814, 0.539084, 0.730862, -0.864076, -0.618822, -0.772608, 0.490613, 2.212158, 0.516135, -1.157739, -2.634500, 0.571563, -1.573212, -0.711765, 1.180959, -0.373291, 2.194703, -0.952596, -1.799709, 0.366626, 0.729010, 1.509726, -1.471722, 0.689311, -1.890980, 0.109291, -0.293854, -7.102549, 0.864711, -1.925957, -2.398473, 0.070788, 0.772047, -0.936493, 0.355516, -1.150345, 2.211665, -1.462134, 0.899524, -0.547171, -1.410111, 1.703938, -0.542024, -0.010607, 0.373755, 0.028845, 1.326445, 0.019488, -1.771527, 0.524494, 0.017291, -0.286668, 8.919433, 1.473101, -0.561951, -1.085727, 0.053229, 2.229653, -0.985613, -0.056620, -0.427974, -0.935476, 0.379258, 1.237837, 1.879896, -1.904384, -0.836707, 1.176770, -1.594859, 0.507863, 1.057378, 1.047227, -1.015435, -2.173443, 2.169472, -1.200064, -0.429828, -0.267489, -0.776403, -2.399449, -3.496821, 2.026711, 0.852635, 0.081830, 0.412480, 0.735022, -1.004585, 85.523735, 2.078824, 1.405537, 1.420258, 0.149226, 0.767906, -1.045229, 0.557545, -2.155961, 1.614029, -0.409987, -1.186746, 0.695672, -0.450095, 0.255835, -4.721718, 0.876708, -3.239688, -1.663275, 0.659779, 4.212816, 0.844798, 2.285716, 0.078218, -0.604332, -2.378236, -1.490959, -2.795218, 0.224045, -4.546728, 2.146162, -2.584908, 1.049261, -1.301235, 2.983293, -3.510124, -2.674554, -4.796475, -1.718512, -2.418987, -0.734958, -1.945737, -1.837463, 1.297716, -1.405850, 0.369966, 1.915294, -2.376806, 2.161540, -2.988676, -0.469228, 2.036156, -1.476177, -0.744910, 1.748057, -0.018671, -1.442792, -6.656451, 0.918635, -0.362746, 0.919281, 0.000438, -0.347663, -2.110237, 1.606745, -4.026446, -0.944152, -0.319697, 3.299418, 2.319917, 0.496700, 0.197903, -0.453018, 2.012677, -0.984651, 1.420823, 0.682351, -1.306300, -1.783154, -0.197681, -71.216934, -0.294691, -2.129059, -0.596002, -3.335691, 0.586273, 0.034065, -1.840573, -0.541438, -1.184142, 1.288468, -1.142040, 1.612682, -1.542768, 0.589435, 0.705813, 1.315846, -3.608182, -1.755418, 2.117489, 0.456578, -4.433335, 2.531418, -2.914086, 0.289848, 0.367142, 0.569207, -1.071584, -0.718807, -1.297903, 2.025395, 0.311929, -0.367405, -0.669306, 2.902781, -2.351904, -3.664730, 0.334796, -0.233591, -0.679808, 0.607821, -0.923462, 0.010394, 1.311700, -0.794279, 1.272044, 1.145559, -0.554230, 0.170776, -0.683578, 0.832438, -0.391338, 1.030268, 0.154898, -1.693181, -0.825256, 1.546282, 0.107223, -0.801853, 0.507212, -1.046948, 1.259310, 2.143708, -0.385058, 2.163804, -0.650710, 0.815172, -0.136191, -0.168338, -0.612016, -2.582033, 2.350318, -1.175564, 1.534975, -1.647843, -2.409241, -2.228449, -1.189075, 8.314133, 3.130002, 3.402851, -0.153991, 0.099502, -1.721480, 0.724244, -0.710175, -1.003277, -2.147532, 0.670640, -0.524477, 0.161175, -0.488788, -0.776367, -0.163723, 1.141242, 0.477827, -0.338653, 3.910418, -1.130356, -0.450035, 1.048413, 0.889426, 0.831565, 0.825409, -2.665827, -1.108922, 0.087185, 0.153231, 0.097283, -0.009498, 1.079242, -0.654185, 1.469537, -0.403617, 3.028829, 1.209276, 0.516813, 1.816853, -1.652552, -0.279288, -0.125876, -5.910278, 0.277881, -3.292769, -0.311732, 0.827438, -0.586001, -1.585721, -1.685168, 2.303844, 2.286766, 0.763010, 1.630826, 2.711380, 1.245094, -0.469989, 0.855273, 1.846231, -1.269401, -0.729980, 1.096651, -0.862948, 0.935156, 0.260136, -0.100846, -0.456174, 0.559441, -1.798339, -1.184900, -0.090250, -0.442664, 1.438874, 
-2.826569, 0.470369, 0.843450, 0.425958, -1.731183, -0.059186, 1.212237, 1.589445, -0.443802, -0.687553, 0.371740, 2.189436, 0.474874, -3.502179, 1.588933, 0.181478, 2.009142, 4.031461, -0.704131, 0.895830, -0.611670, -1.670501, 0.643384, -1.248581, 0.082029, 1.078038, 2.198903, 1.562441, -2.118029, 1.341515, -2.595295, -0.204816, -1.799077, 0.701830, -0.334559, -0.566707, 0.321890, 3.158910, 1.361753, -1.598902, 1.355754, -0.885561, 0.726786, 0.674248, -1.400915, -1.797559, -1.418724, -0.480368, -1.014434, -1.377594, 3.304005, 1.081017, -0.142937, -0.725848, -1.468716, 1.641540, 1.747708, -1.710998, -0.801586, 4.428770, -0.632803, 1.287084, 1.224783, 0.977675, -2.524289, -1.215465, 0.847066, -1.971717, 0.296576, 0.230343, 1.897276, -2.315022, -2.489991, 7.028148, 0.139651, -1.489560, -0.108529, 0.973838, -0.650666, -1.174496, 1.229644, 0.677356, -0.325306, 0.731556, -2.130413, -0.572011, -0.167396, -0.152763, -1.513432, 0.359530, -0.202073, 0.554924, -0.042635, 3.754293, 0.751861, -2.220118, -1.367504, -0.180788, -1.781298, 2.152536, 0.546775, 0.485454, -0.463405, -0.002958, 2.147824, -2.007004, 0.673060, -1.540999, -0.136570, 1.007624, -3.474314, -0.617297, -2.784721, 0.194940, 1.658762, -1.411829, -0.737517, 0.163858, -0.777713, 0.560314, 0.347544, -1.521263, 0.037006, 0.437072, -1.070862, 0.965648, 0.773028, 0.307019, 1.041627, -0.733241, -0.977493, -0.429238, 0.050614, -0.829562, 1.884014, -0.253405, -1.059937, 1.722486, 3.461274, -0.906070, -1.530156, 2.198265, -0.459783, -0.875729, -2.660546, 1.420933, 0.514850, -1.837734, 1.603957, -1.275833, 1.374402, -0.793919, 0.314476, -1.049270, -2.078234, -1.616638, 0.710537, 0.103625, -0.215743, 0.339599, 1.437179, -1.827948, 1.171729, -2.009682, -1.838992, 0.522406, -0.793820, -0.262933, 1.215268, 0.311192, -2.908158, -0.980768, 1.489822, 2.204947, 0.427181, 1.810356, 0.169103, -0.356967, -2.706281, -0.685192, -1.423497, 1.140927, 2.658384, -0.174297, -2.490361, -2.298670, -0.520309, 3.752534, -2.208305, -1.443815, 1.652624, 0.167723, 0.332152, 1.866258, -0.691057, 0.741715, 0.083042, 2.126400, 1.744948, 1.236283, -1.306377, 1.292937, 0.601601, -1.355803, -1.116400, -0.419224, 0.055426, -2.328909, -2.768005, 0.509521, -1.454753, -0.308576, -1.168338, 0.624593, 0.610668, 3.083825, -0.558987, 1.695581, 0.923159, -2.462692, -3.159868, -1.100677, 1.183890, -0.075880, 0.060596, 0.395339, 1.513366, 0.216280, 0.116348, 0.286980, 1.269514, 1.092108, 2.913883, -2.416278, -0.447192, -0.828047, 1.375085, -1.882448, 0.414232, 0.935447, -0.763903, -0.097080, -1.468093, -2.108227, 0.812448, -3.030955, 1.650276, 0.645374, 0.822754, -1.715253, 0.949443, -0.583930, -0.466487, -0.239685, -0.154113, -2.259127, -0.569317, -7.950617, 1.770871, 0.655170, -1.549587, -0.303264, 0.943074, -1.256375, -1.578881, -1.402247, 0.315739, 1.952985, 1.138246, 0.319435, -1.066226, -0.019303, -0.999552, -2.404504, 0.239226, 0.852229, 0.214280, 0.332852, -0.784271, 0.583816, 0.894294, 0.250293, 2.689180, 0.932068, -2.733958, 1.505688, -0.201235, 0.328853, -2.243844, -0.852288, -0.334035, -1.317273, 0.503698, -1.798193, 0.040109, 0.106212, -1.145169, 2.781565, -1.174213, -0.674775, -0.130195, 1.723452, -0.821414, -0.988254, 0.794259, 1.748087, -1.606799, 1.968351, 0.505302, 0.220784, 2.829643, 4.248420, -3.127151, 1.931689, 1.164102, 0.181212, 1.115405, -0.622059, 1.559150, 0.941379, 0.416869, -1.277200, 1.816182, -1.619316, 2.531234, -2.209251, -25.740744, 1.239025, 0.352856, 1.432124, -0.332224, -1.228410, -0.694785, 0.114492, 1.416174, 3.427281, -1.674708, 0.855310, 
-0.479453, 1.476262, -0.355835, 1.400066, 0.940392, 0.481807, -1.784303, 1.240418, 1.895536, 1.413484, 1.553220, 2.173478, 0.727745, -2.230431, 1.786677, -3.234339, 0.446137, 1.327993, -0.058461, 1.030128, 1.250014, 1.474441, 1.615538, 0.881243, -0.266443, -0.962094, -0.238979, 0.833562, 0.159211, -0.148265, -1.146298, -1.831205, 2.363937, 0.090987, 0.768067, -1.070195, 1.081125, -0.243713, 1.352430, -1.088068, -0.387508, 0.953645, -3.607654, -0.208207, 1.058219, -3.540133, -0.176322, 2.531761, -0.393910, -0.924363, -2.009626, -1.049115, -2.175923, 1.481581, -0.861316, 1.512275, -2.268067, -3.085932, -0.852963, -0.789792, 5.231994, -1.058063, 0.445215, -0.513278, -1.550916, 2.363637, -0.318830, -2.202391, 1.119512, 0.626732, 1.292118, -2.083883, 2.010500, -1.720330, -0.122876, 0.385993, -0.407019, -1.770024, 0.091870, 0.019525, -2.933962, -1.870932, -0.406637, -2.040491, 2.249710, 0.985226, 0.149422, -0.862363, -1.480567, 1.242449, -1.509518, -1.318086, -0.422596, -1.817529, 0.690241, 3.352541, 1.813458, -1.237180, -0.927014, -2.663395, 2.547916, -1.268841, 0.271833, 0.841779, -1.014784, -1.296559, 0.501792, -0.392863, 0.580616, 0.890395, -1.411767, 1.945539, 2.356490, -0.262101, -1.529146, 0.329766, -1.872703, -2.605136, -1.052404, 0.674966, 0.823735, 2.996305, 1.152302, -0.793134, -1.894817, -2.353142, 0.853609, -1.198352, 0.830396, -2.602845, 0.743173, 1.596138, 0.925561, -0.287728, -1.579190, 0.150707, -0.485093, 0.598073, 0.496564, -2.433938, 0.130943, -0.043445, -0.263888, -0.660274, 0.522155, -0.298216, 1.100825, -1.295024, -2.334423, 2.466950, -1.960287, -0.834913, -0.346131, 3.187697, -1.070666, 0.077956, 2.692153, 1.540347, -0.574894, -2.438186, 1.043280, -2.232725, 1.013549, -1.932124, -0.052887, 1.029336, 0.180861, -0.648796, -0.342356, -2.479274, 2.431973, 1.280207, 1.464802, -0.195265, 1.586246, -2.987961, 0.182641, -0.190149, -2.029029, -0.709019, 1.711207, 2.726928, -0.096125, -1.912724, -1.268916, -2.368214, 1.454860, 0.801292, -2.090639, 0.009335, 2.916453, -0.183593, 1.305621, -3.262085, 0.502414, 0.378839, 1.857305, 3.683550, -0.461560, 1.259201, 0.904573, -0.647443, -0.042906, 1.316142, 0.485262, -3.935773, -0.140856, 0.867518, -0.602693, -0.198637, -1.121627, -1.759840, -0.771186, -0.631131, 0.145151, 1.343153, 0.336181, -1.292166, -0.289050, 2.379897, 3.440751, -2.258761, -0.405697, -0.540307, -0.416136, -0.476906, -1.309639, -1.318478, -0.976233, 2.689849, 0.503559, -2.040556, 0.901806, 0.487302, 2.344204, 0.956174, -2.422247, -3.842064, 0.968857, -0.421343, -2.225902, 0.336079, -2.274005, 0.206273, -0.107490, -1.369931, 0.393495, 1.654393, 2.169506, 2.919102, 0.297331, -2.821940, 0.092887, -0.916158, 1.015810, -1.826803, 0.693532, -0.681707, 2.181287, -0.110305, 0.464497, 1.295338, -0.310423, -0.724674, 0.290203, 0.463358, -2.186084, 1.328547, 1.576344, 0.144542, -0.567771, -0.598133, -1.894698, 1.443123, -2.385668, 1.698364, -0.366134, 0.106805, -2.805731, 1.153937, -3.080271, -0.178250, -1.938542, 0.275216, 0.002397, 0.297635, 1.461271, 0.305871, 2.517221, -0.174000, -1.098956, 0.030936, 2.244885, -1.301349, 0.749536, 0.885684, -3.014035, 1.454764, -1.336601, -1.013402, 2.573905, -0.448159, 0.544451, 2.394425, 0.483757, -2.497281, 0.053167, 0.361224, 0.469581, 0.631805, -2.330993, 1.676361, 0.697558, 0.591515, -1.039913, -0.180206, -0.462571, -0.209459, -0.576065, 2.279284, -0.299172, -0.194904, 3.619387, 3.934561, 1.172894, 0.526611, 0.398059, -1.614865, -1.489991, 0.778708, 0.744119, -1.220411, 0.515268, 0.339539, -0.061137, 1.092670, 1.363884, -1.687957, 
-1.743584, 0.665694, -2.041978, 1.595987, -0.604114, -2.104793, -2.419405, 1.406765, -0.474117, -1.016261, -0.785798, 0.119473, 0.003550, 0.720526, -2.634799, -0.144064, 0.490726, -2.278433, 0.683341, -1.065922, -1.578778, -1.477575, 0.541458, -0.875865, -0.487008, -1.175961, 1.179989, -0.449157, 0.729390, -3.625034, 1.015845, 0.557515, 0.752713, -0.826017, -0.379720, 2.008976, -0.097426, 2.044271, 2.861414, -2.637050, 1.129714, -1.050685, 0.790966, 0.354504, -0.810705, -1.522652, 0.420090, 1.051780, 0.083435, -2.059101, 0.264866, 0.416219, 1.267804, -1.444463, -0.740033, -0.650456, -1.024885, 0.742108, 0.292962, -0.609546, 0.451259, -1.788922, 0.626221, -2.578477, 0.010107, -2.879778, -3.180357, 1.073472, 0.062782, 0.388496, -3.137921, 0.150180, -1.374213, 1.562546, 1.569208, -1.770587, -1.747225, -0.089173, -0.254919, -0.085761, -0.900465, 0.803404, 1.444025, -1.222196, 2.582620, -1.769255, -0.921337, -0.047699, -0.992220, 0.429236, 1.354063, 2.468357, -1.239177, 2.234062, 0.171570, 0.504752, -0.049483, -0.081486, -2.141028, -2.112175, 1.444213, -1.242927, -1.942442, -0.412966, 0.468290, -1.691532, 3.342434, -1.029143, 0.414474, -1.291863, -0.429215, -0.803098, -0.879434, 2.890400, -0.152064, -0.716110, -0.148828, -0.454403, 0.121447, 0.709472, 2.085076, 1.230407, 0.457659, -0.619703, -1.031744, 0.799820, 0.568080, 1.409062, 1.145725, -0.293734, -0.423526, 1.089540, -1.204836, -1.550930, -1.154674, 0.411535, -1.269684, -1.250055, -0.885318, -0.804515, -0.656637, 1.311883, 2.570914, -1.757858, -1.614506, 1.054425, -0.230994, 0.163260, -0.068314, -2.478331, 3.433658, 0.563837, -0.291979, 1.330703, 1.196056, 2.749060, 2.298057, 1.664460, 1.296717, -3.561041, 2.031934, -0.885923, -1.230606, -0.719654, -1.522524, 0.055912, 0.466891, 0.711304, -0.256905, 0.254127, 1.035667, 2.418933, 0.657365, -1.073165, 0.727122, 0.396898, -0.679162, 0.537641, -0.216880, 0.741149, 0.127517, -0.685507, -0.816983, 1.314867, -1.926546, 1.033232, 0.666667, 0.412173, 1.105362, -2.406342, 2.025838, -0.129165, -0.057019, 0.841208, 0.438537, -3.475091, -1.677753, 1.045024, -1.291526, 3.112645, -0.942207, -0.398658, -0.036666, -0.840055, 3.272867, 2.229147, 0.280921, 1.172878, -1.377426, 0.725625, -1.500038, 108.974983, -1.543268, 0.883873, -0.201575, -1.100014, 2.848984, 1.649240, 1.179316, 0.099843, -0.212120, 1.355394, 1.073641, 0.472136, -0.692718, 0.856604, -1.007322, 2.029020, -2.749115, 1.646423, -0.965607, -0.166112, 1.916322, -0.353382, 1.195508, -1.164256, -1.924362, -0.336077, -0.864081, -0.371600, 3.507040, -2.662761, -2.700086, 2.645764, -0.588335, 0.595821, 0.395873, 0.261532, 0.897741, -0.759526, 1.338342, 2.039644, 2.488708, 0.658869, 1.146101, 0.292945, 0.884184, -1.021913, 0.839312, -0.537249, -1.822289, -0.234070, -0.431272, -0.285895, -3.998769, 0.750883, 2.391869, 1.593652, -1.261236, -2.025501, -2.642597, -2.048967, -0.752406, 0.951185, -2.935954, -0.718500, 2.103240, -0.040910, -0.250542, 0.311411, -0.366309, 0.467215, -0.604038, 0.314239, -2.524184, -1.082193, -1.330313, 0.221131, -0.624326, -0.620772, -0.979783, 1.351199, 0.502501, -1.110724, 4.062208, -0.256656, 2.276846, -1.185725, 0.992122, 3.420039, -0.765948, -0.221500, 0.052288, 2.189865, -3.686619, 1.575352, -2.061088, -1.655098, 1.522987, -1.593854, 2.274162, -3.057110, -0.267945, -0.324140, 2.006741, -1.241994, 0.555711, 2.565888, 0.524792, -0.985646, -0.265675, -1.702239, -2.531488, -0.342326, -0.119078, -0.687255, 0.043045, 2.030811, 0.411119, 1.568364, 0.993975, -1.323289, 1.294146, -0.606770, -0.394903, 0.695981, 0.637678, 
1.621926, 0.246330, 0.628986, -0.901297, 2.450891, -0.569776, -1.150095, 0.021318, -3.895612, -0.164137, 0.933785, -0.871159, 0.040331, -0.651584, 1.293960, 0.330892, 0.329866, -2.629156, 1.389845, -1.397733, 1.041358, -0.667219, 1.163123, 1.207297, 2.175764, -1.075607, 1.500163, 1.805551, 0.156989, -0.486652, -1.370991, 1.351088, 0.561534, 1.513762, -1.754055, 0.475647, 1.119235, 1.254973, -2.200987, -1.333424, 0.375751, -1.237126, -0.676050, 1.050322, -0.013615, -2.973356, 0.652008, -0.077755, -2.290762, -1.726803, -0.349222, -1.880172, -3.292858, -0.421512, 1.264125, 0.659902, 0.536645, -1.186392, 0.529164, 1.962576, -1.307417, -1.977710, -2.907781, 0.827774, 0.823504, -0.538302, 1.915013, 1.546227, -0.977086, 2.392261, 2.466423, -1.028928, -1.125520, -1.472190, 1.722015, -1.398091, 1.358133, 2.307282, 0.844900, -0.749014, -1.314827, 1.375018, 0.271454, 0.050423, -0.225399, 0.559803, -2.023571, 0.641929, 4.616908, -0.534789, 1.204665, 0.890745, -1.321189, -2.127558, 0.227066, 0.474820, 0.424672, -0.511897, -0.010376, -1.578745, -0.707301, -1.494194, 1.013243, -0.970379, 0.637081, 2.441757, 1.178170, 2.231665, 0.482194, 0.784995, 1.511568, -1.686735, 0.049919, 3.724557, -0.350886, 1.023152, 0.916885, 2.427388, -0.727273, 1.440410, -0.418736, -1.179116, -1.259873, 0.026272, -1.203515, 0.676995, 1.240330, -0.988959, 1.715686, 1.353497, 2.821455, -0.048384, -1.099469, -3.833539, 1.403490, 0.952264, -2.078660, -2.387847, -0.203457, 1.164888, -0.377990, -1.089652, 0.617669, 0.603560, 2.780058, -0.498905, -2.730738, 1.206661, 0.738163, -0.554591, 2.806867, 0.120156, 0.885852, -1.902021, -1.315665, 2.646314, 0.637651, 1.876910, 2.067616, 1.388101, 4.428380, -2.684014, -0.728300, 0.425805, -0.979354, -1.248050, 0.428526, 1.322177, -0.462034, 0.489450, -0.296501, 1.194134, 0.570492, 0.787840, 0.798111, -0.121259, -1.281682, 0.651666, -0.329396, 1.621351, -3.434499, -1.140202, -1.494462, 0.083887, 0.793408, 0.522874, 0.647781, 0.181029, 0.284212, -0.032588, 3.363986, -1.337602, 0.600369, 0.929805, -0.118197, -0.434214, -2.996208, 2.394993, -0.696582, -1.882186, 0.424430, -0.136489, -11.177244, 1.433617, -0.422023, -2.142893, 1.765504, 4.017715, 0.691570, 2.013083, -2.646734, 1.372750, -0.200401, 1.711830, 1.310737, 2.380119, 1.687647, 0.272407, 0.597892, 0.438291, -0.701525, 1.462026, 0.982752, -2.198225, -0.720614, 0.217967, 2.184405, -1.511539, -0.467931, 0.093546, 0.569584, -0.231853, 0.606410, 0.161362, 0.297654, -0.598280, -0.636738, -0.479192, -1.080690, -0.533575, -0.481348, 0.392146, 1.667896, -1.654058, -1.187042, 2.232192, -1.161160, -0.934447, -2.306565, 0.173936, -0.549353, -1.700276, 0.564659, 1.314084, -0.155444, 1.542174, -0.539523, 3.501718, -0.409948, -2.287433, -1.245231, -0.713698, -0.073254, 1.562305, 1.728484, -1.469147, 3.051828, -1.058781, 2.404095, 1.481896, -0.235652, 2.031891, -1.883109, -1.893306, -2.758252, 2.430271, 2.785730, -2.080566, 1.592087, -1.643902, -1.741971, -4.103412, -0.942100, 1.128346, -2.384707, 1.036259, -2.752170, 0.359940, -1.679895, -1.185395, 1.481621, -1.940301, 1.939124, -0.243517, -0.729444, 0.452522, -0.667398, -1.528768, -0.084491, 1.454401, -0.876688, -0.727226, -1.147725, -0.451758, -2.296567, -2.192117, 0.155676, -2.761838, 0.035215, 0.493224, -2.091021, 58.901432, -1.606073, -3.120444, -0.751568, 0.656560, 2.347460, -2.288090, 0.937204, -0.619620, 1.410374, -0.619253, -0.660450, 1.456668, 1.938942, -2.594143, -1.986019, 2.836044, -0.558226, -4.271777, 1.487002, -0.626783, 0.327943, -0.177811, -0.279220, 44.279232, -0.000625, 1.140469, 
-0.037682, 0.479224, -0.699490, 0.652760, 1.851329, 0.161828, -1.647348, 0.302765, 0.568568, 1.134713, -1.047642, 0.557566, -0.854736, 0.679620, -2.383316, 1.417158, 1.660055, 2.417681, 2.144057, 0.051083, 0.624508, 0.984353, -1.782677, -1.427122, 2.440223, -0.743944, 2.089868, -4.443433, -1.378514, -0.578853, -1.935174, 0.785260, 1.709202, 0.102794, 1.711256, 1.248609, -0.569197, -0.683822, -1.620399, 0.619570, -1.480553, 1.347544, -0.512033, -3.201872, -0.088561, 0.719905, -0.070067, 2.563033, -1.351368, -0.200104, 0.776716, -2.187084, 0.891719, -0.400018, -1.958174, 0.762099, -2.248331, -1.565500, -2.753872, -0.268325, -0.892033, 10.707685, 2.690685, 0.231237, 0.789252, 2.969221, 1.503455, 0.571084, -1.230273, -2.020150, 1.923794, -0.710662, 1.361673, -1.752455, 0.096719, -0.173957, -1.541730, 0.748128, 0.676815, -2.004625, 0.021753, -2.067259, -0.090012, -2.118023, 1.397831, 2.612525, -2.713913, -1.989865, -0.009225, 1.418425, -2.013482, -0.485722, 0.703349, -1.674182, -2.101350, -0.914536, -4.427722, -0.433252, 1.391613, -1.398082, 1.423048, 0.681397, 0.176488, -0.204174, -3.211823, -1.960403, -0.029697, 2.571298, -1.093950, 0.904089, -3.640918, -0.499479, 0.625533, 0.161181, -0.985214],
+	"starcoder2:latest":    [3.954906, 3.446956, 4.132194, -1.308157, 4.217354, 0.978980, -3.164102, -2.884384, 1.930067, -1.850368, 0.912804, -1.633436, -0.083181, 1.147634, 1.383439, -2.179369, -6.851675, 0.383219, -2.499716, 4.524372, -7.357098, -4.032088, -0.446788, -3.416954, -0.716143, 0.133193, -0.770150, 0.747172, -1.350329, -1.988400, -0.100476, -2.170013, 1.644468, 1.707521, 0.325533, -4.403326, 0.637300, 0.350682, -0.928538, -2.591375, -0.896269, 1.880500, 3.309983, 2.229650, -0.769332, 0.131835, -1.750641, -5.785780, 0.477680, -0.229142, 0.776822, -3.113634, -1.487448, -4.305194, 2.233401, 2.360487, -4.390745, -1.771400, -4.191641, 2.105277, 3.461224, 3.597554, -2.192061, -1.519642, -0.950530, -2.892504, -1.049523, 2.070168, -2.538044, 2.219159, -2.942192, 1.363763, -3.478725, 4.298923, 0.565158, -3.088574, 1.070849, 1.664940, 2.649776, -0.508506, -3.603013, 0.664562, -3.398930, -0.149324, -0.288706, -1.093250, 4.242371, 1.714020, -7.155812, -2.404553, -8.509516, 4.994543, 3.872673, -2.871006, -3.556372, 1.039953, 1.879293, 0.953682, 1.384863, 4.057693, 0.973630, -3.057381, 0.942924, -4.912657, -0.438702, -0.330589, -1.313686, 2.855739, 3.051247, -1.675123, -0.029737, -3.202343, 0.494662, -4.007298, -1.643547, -6.927251, 0.813801, -0.696914, 3.659765, 1.090904, -1.843107, 1.553456, -8.342607, 4.401372, -4.314304, -3.726803, 0.642371, 6.338482, -0.388236, 4.319686, -4.750969, 3.902664, -0.574822, -3.063889, 2.006676, 3.434633, -5.040134, 2.665939, 0.430951, 2.571163, 3.511206, 1.461066, 3.273216, -5.620604, 3.440011, 3.553609, -0.039653, 1.648325, -0.872830, 0.185128, -3.091564, 2.200620, 3.167208, -2.833151, -5.076702, -4.427409, 1.205788, -2.541477, 5.408942, 4.078667, 2.740433, 0.571368, -1.551391, 2.512275, 4.750610, 0.472481, 0.696102, 2.621961, -3.720625, -1.839476, -7.474308, -1.506337, 4.908046, -0.469909, -2.005977, 2.365716, -0.496384, -4.986031, 3.318370, 3.450526, 2.631732, -2.603594, 1.190445, 2.637537, 0.869908, 0.413760, -2.020536, 2.127237, -4.783207, 1.706683, 8.533383, -0.677833, -2.040554, -7.255683, 5.987998, 2.188214, 3.920970, 0.257084, -2.536839, 2.571880, -2.356964, 2.739559, -0.717192, -2.021268, -54.268299, 0.333840, -0.165839, -1.502152, -3.867745, -5.023274, 4.025485, 0.980240, -5.584783, -0.596023, -1.808546, 3.591976, 2.904098, -4.535067, 4.661890, -10.922905, -2.987491, -9.325285, 1.980194, -2.097212, -1.019022, 4.682246, -1.427983, 3.520185, 1.046766, 2.215647, -0.969066, 0.033140, -2.179222, 4.788574, -1.198171, -1.201237, -2.539800, 0.591574, -3.490026, -2.674280, -3.963340, 18.968071, 5.505445, -0.093186, -2.184116, -0.737323, 2.018108, 3.244152, -1.713053, -1.515601, -2.776108, 1.291257, 0.117369, -9.578768, 1.925400, 4.567656, 0.901392, 1.137718, -0.379492, 1.382629, 3.525820, -1.730144, 0.172482, 0.584189, 2.579916, -0.138648, 2.106573, 9.056437, -2.702915, 1.538335, 0.673111, 2.611344, -5.052717, -0.152712, 3.724612, 0.230983, 1.890635, -1.014450, 3.154704, -1.116773, -2.011335, 0.950536, 1.706761, -2.864771, 3.438492, -1.220277, 2.567598, 1.270461, 2.356857, -0.287333, 2.063555, -3.507069, -3.196346, -4.356922, -0.267165, -6.006845, 0.486452, -2.743304, -1.646718, 3.736859, 4.527998, 4.153322, -2.734528, -0.020917, 3.385880, 0.135446, 0.242599, -0.082168, -2.619100, 1.342911, 0.636880, -0.483083, -0.189432, 0.026360, -1.258501, 0.261005, 2.281953, -3.362994, 4.558819, 0.663074, -2.211503, 1.318164, -1.085270, 1.818389, 1.044821, 0.491438, -0.142887, -4.251420, 7.596611, 5.024518, -0.434129, -0.619839, 4.385494, 3.004915, 0.564843, -0.262337, 
4.462246, -2.494302, 2.775275, -3.166047, 0.939039, 0.279873, -1.359035, -1.443188, 1.324596, -6.544474, -2.342992, 0.011879, -4.089714, 1.592429, 2.206237, -2.116853, -3.156819, 5.479142, 1.899718, 2.731661, -3.741627, -2.232041, -3.275277, 0.243284, 3.543710, 2.899739, 2.000626, 3.344675, -4.386779, -0.993285, 1.166946, -4.742619, 0.648653, -8.625524, -4.236928, -1.981542, 4.250235, 2.827621, -3.327694, 4.063057, -5.468178, 4.169388, 4.380604, -1.700271, -5.375949, -3.708664, -1.256779, 3.119228, -1.263999, -0.016020, 2.059651, -0.494237, -1.475365, 1.853182, -1.576096, 2.082677, 0.286721, 0.287749, -0.677656, -1.040530, -1.405734, -1.721848, 4.456102, -2.748078, -0.479098, 7.328283, -3.192072, -1.224766, -2.871030, -2.309623, -1.380889, -2.522825, 1.211852, 1.634454, -2.443470, 1.728376, -0.185181, 0.158396, -6.934468, -2.349255, 1.067602, -1.707984, -3.699120, -3.370043, -1.735215, -0.249390, 0.172209, -0.135927, -3.118189, -2.674917, 2.148926, -1.210936, 6.989573, 0.455856, 3.851568, 0.571797, 1.976329, 4.597493, -1.152350, 2.556751, 0.562547, 0.374311, -2.292119, 0.033077, -3.868658, -3.757327, -1.186643, -5.969309, 0.595574, 2.546074, 4.349101, 1.176634, -0.476572, -4.512696, -0.863966, -2.406134, 0.144893, 2.298353, -0.402904, -3.324984, 1.050415, 2.997760, -1.168943, -0.737656, 0.773710, 0.109672, -6.955970, -3.012873, -5.201923, 1.191964, 9.995472, -0.460779, -2.063271, 3.809098, 6.166883, -1.470808, -0.093703, -0.966475, -2.878819, -3.747328, 4.072700, 0.371183, 0.629432, 1.275924, 2.468330, 5.615418, 2.137433, 0.884437, 7.149430, 4.254181, 2.851663, 3.482609, -2.689646, -1.899801, -2.926574, 4.338265, -0.547124, 1.510705, 0.823134, -4.255880, -5.178019, -2.511326, 1.515535, 2.961271, 1.749568, -0.547877, -3.209148, 2.146317, -3.191516, 5.372285, 2.545877, 0.738861, -7.250521, 1.140111, -0.790099, -3.194763, 1.813850, 5.258968, -1.546785, 7.240694, 7.095726, -1.567679, -3.463788, -2.272077, 0.326402, 1.812266, 1.097216, 0.258934, 7.259093, -6.611300, 4.369743, 0.233526, -1.724230, 0.565005, 0.331715, 1.606365, -2.478334, 0.074496, 4.647484, 2.034292, -1.286420, 1.099764, 2.053190, 4.063726, -0.562997, -0.102809, 0.395045, -0.367260, 0.978149, 1.751679, -1.107427, 0.525725, -6.672098, 1.844507, 0.584685, 0.495569, 1.294837, 4.817148, 1.280627, -2.745418, 2.519278, -2.430676, 3.719332, 4.467507, 2.878108, -1.753611, -0.706735, -1.235836, -3.394101, 2.680115, 5.564773, -25.643671, 0.944054, -3.424004, 3.784788, 2.892951, 2.147227, 3.207940, -3.589079, 0.056116, -0.246300, -0.003123, 0.624385, 5.308627, 0.048532, -0.612551, -3.381337, -0.151319, 1.531758, 0.098887, -2.412398, 2.590293, -6.616521, 1.363917, -0.141630, -6.469071, -4.452425, 0.858186, -2.877003, 3.085377, -1.986627, 0.513879, 1.591452, -2.129533, -0.313005, -0.639644, 5.282188, 1.775640, -5.907731, 2.823747, -3.239740, 2.973734, -4.598226, -4.568226, -5.474308, 0.002731, 0.257645, 2.806026, 2.050392, -3.301049, -3.430546, -1.178443, 40.045464, -1.429249, 0.936941, -2.159034, 1.444639, -0.336448, 0.664087, 1.969398, -0.508663, -1.639841, 2.506701, 0.266379, 3.149135, -0.325703, 4.546886, -3.670993, 2.518780, -0.060650, 3.132485, 3.577308, 3.705828, 3.752754, -1.046455, 3.790464, -4.996606, 8.385803, 0.004652, 2.942170, 2.520080, 5.030700, -1.782003, 2.186366, 2.809453, -4.000122, -3.857088, -2.281824, -0.710834, 5.214368, -1.924514, -0.743523, 2.644260, -0.531909, 0.830519, -0.351119, -5.305862, 0.833811, 1.349116, -1.093713, 2.924214, 0.179195, 1.216857, -3.064413, -0.433600, 1.285622, 0.329023, -0.378243, 1.295149, 
1.820416, -2.452358, 5.332327, 0.855822, -1.862306, 2.299675, -2.495447, -0.856743, -1.795716, 1.493013, 2.535764, -0.623357, 5.355479, 1.528928, -2.333948, 1.343511, -2.886343, -4.315742, 1.494795, 3.670878, 2.528954, -4.513095, -4.793800, -3.309853, 5.122548, 1.078647, -3.852395, -1.237825, 0.578560, 0.151397, -0.313312, -2.353152, -0.925905, -2.447389, -1.272905, -0.966679, -3.534346, -1.952816, -1.391251, -5.503417, 3.083206, 1.603203, 1.594700, 1.879373, 2.644297, 1.984598, -1.328263, 10.436472, -1.211096, 0.258482, 1.039371, 2.810867, 1.315435, -1.010990, -6.820943, 3.245251, 2.012278, -0.485755, -1.931857, 1.755673, 1.319367, -7.041339, -0.120645, -2.525263, -1.041744, 0.461637, 3.536187, 1.663792, -1.046046, 4.882263, 1.606150, -1.028896, -3.787328, 1.720849, 0.990523, 1.311473, -0.380394, 2.589013, 2.159781, -0.235688, -1.006128, -1.421028, -4.730341, 2.582766, 0.617903, -3.711154, 0.329820, -1.163525, -6.910535, 1.042382, -3.019486, 5.790540, 6.632965, -1.451595, -5.548892, 4.663946, 0.774761, -0.516832, 1.734756, 0.712267, 0.441946, 5.133786, 2.175959, -1.424711, 2.850309, -4.076188, 3.363111, -0.146001, 4.453137, -7.930290, -4.174343, -8.381124, 2.078207, 0.817420, -1.242864, 3.261016, 0.755861, -0.831756, -1.486270, -3.102542, 0.572175, -1.355858, -1.116844, 0.988796, -0.765274, 0.535530, 2.798912, -0.872012, 1.312495, -5.169959, -2.461925, 32.000229, 3.070940, -3.010790, -8.231589, -0.311377, 0.026132, -2.744881, 1.916952, 2.714581, 0.238231, 3.783271, 4.726935, -1.905172, -0.647758, -36.757748, 3.181995, -1.353556, -2.416937, 2.320525, -2.297328, -7.907078, 1.462360, 2.113646, -0.573712, -8.758898, 0.858195, -0.652538, -1.551567, 2.702150, -2.158720, 7.030653, -8.981299, -2.873467, -3.894970, 0.079853, 1.225966, 2.812419, -3.370865, 1.705058, -0.797177, 1.395418, 1.474460, -1.563024, 2.567413, -0.129698, -3.298024, 1.761030, -3.790629, 3.480512, 4.441885, 3.906168, -1.588898, -2.205953, -0.735543, 6.266836, -1.045668, -4.019965, -2.978006, 4.059746, 1.515308, 2.426685, -1.051778, 0.243209, -2.733144, -0.939245, 1.628108, 1.924429, -1.988333, -1.907789, 6.501453, -5.372938, -3.692094, 2.084061, 2.598653, -2.129409, -1.099973, -2.259107, -1.812507, -3.265822, -5.205757, 3.254252, 0.549243, 4.423184, 3.607155, 3.125645, 0.110007, 1.579380, -0.820327, -4.340643, -1.246021, 2.509706, 2.404279, -1.848830, -3.794203, -5.309284, -1.768488, 1.953625, 2.033994, 1.841250, 3.990752, -0.339395, 2.897047, 1.390943, 7.685173, 2.497483, -2.268698, -4.039967, -1.410298, 5.251214, 0.412185, -2.122941, 4.800458, -2.148592, 2.614200, -3.319689, 0.214866, -1.062898, 0.094038, 3.353088, -0.151354, -1.451349, -0.436699, 2.316608, 6.956083, 0.809792, -4.175000, -2.111322, -5.318207, 0.920999, 3.898372, -3.036911, -3.944599, -0.053519, -1.137218, 0.923195, 3.309731, 1.307382, 0.471838, -0.557837, -1.457152, -10.488117, 4.947997, 3.032650, 0.926141, 1.031303, 4.860070, 1.865988, 4.291333, 5.294878, 1.033966, 0.655021, -2.618088, 2.143144, 2.635707, -2.914068, -1.279016, 1.189715, 0.900603, 0.150104, -3.965141, 0.084981, 3.026685, 5.314412, 3.465086, -3.757115, 1.248792, -8.722107, -2.305587, 2.938486, -4.834834, -1.033344, -1.279896, -0.816648, -3.617774, 1.542475, -38.896088, 3.234958, 3.912147, 2.612395, -3.285373, -2.863140, 2.881858, 4.916956, -2.752369, -4.856666, 0.561042, -1.688848, 4.023190, -2.463728, 3.498123, 1.399833, 0.355092, -1.268783, -3.607190, -0.907120, 8.478541, -2.200597, 1.559901, 1.995240, -4.085650, 2.091830, -3.321615, -3.862751, -1.994243, 4.994206, 0.906812, -5.086945, 
4.253753, 3.185349, 3.300971, 2.385611, 1.160899, 1.974533, -1.013438, -2.642955, 1.195869, 5.310802, 5.468049, -2.974200, 2.939312, -1.719330, 2.104474, 4.294636, 0.418233, -0.910012, -6.338346, -1.003924, 3.763436, 2.168355, -4.610764, 20.133694, -4.702892, 0.358582, 0.795131, -1.072905, -1.151668, 1.011330, 3.718589, 3.466958, -1.727253, 0.007756, 24.925507, -2.706746, -0.096896, 0.406167, -3.355074, -1.374650, -0.400644, 3.200829, 1.512507, 3.596913, 1.511441, -5.696938, -5.842372, 2.388058, 2.873770, 2.871197, 3.141411, 6.866806, 1.181306, -2.315731, 2.846997, -2.763558, -2.466458, 2.600410, 3.115498, 5.438047, -0.957668, 0.723373, 9.326761, 2.562903, 2.477052, -1.883352, -4.561812, 2.033604, 0.450944, 3.900448, -1.410911, 2.496872, -0.951807, -1.702957, 0.440785, 1.007453, -4.055562, -3.651521, 1.038782, -1.596580, -2.893723, 4.680642, -4.607155, -1.961638, 0.053415, 0.057291, -0.581496, 1.113361, -4.579621, -2.124156, -1.471331, 1.703052, -3.977679, 1.975540, 1.071642, -3.896637, -3.192747, 3.893468, 1.125908, -1.617653, -3.302320, 0.090005, 3.834193, 0.864084, 2.959042, -0.238062, -2.339349, 0.867953, 0.607959, -2.510954, 0.496669, 4.439073, -0.785287, 0.793577, -2.958039, 1.621440, -1.623561, -0.562564, 4.638718, 6.613771, 2.172550, -4.948812, 1.765566, -0.272064, -0.381970, -2.758515, -5.700199, -0.509347, 3.595506, 0.349293, 1.786550, -5.735712, 1.657908, 2.420370, 1.021307, 4.288211, 0.659743, 2.595690, 2.131526, 1.660173, -2.757504, 0.155179, 0.057258, 1.621030, 0.375329, 3.359759, 0.522753, -3.821949, 7.317401, 0.872343, 2.317105, -6.278262, -1.124390, 2.805756, 3.533095, -0.392534, 1.936733, 0.632991, 1.020454, -0.789944, -1.634524, -1.096838, 1.429845, -3.976567, 2.014116, 0.164936, 4.276067, 0.580995, -2.650581, 0.627567, 2.831709, 7.629052, -1.588510, 1.669903, 1.566437, -1.158298, -4.041847, -0.363744, 2.566550, 0.732395, -6.146594, 4.792378, -2.375965, 0.086666, 5.705215, -0.609236, -1.750620, 0.383327, -6.134190, -1.646948, -2.378367, 2.136676, -4.344128, -1.913511, 4.792160, -2.104047, 2.290141, 0.619961, -2.841638, -1.149025, 1.532447, 8.079632, 0.167728, -0.812451, -5.725507, 2.813900, -12.818064, 5.116618, -0.522779, 0.827081, -0.488727, 5.936249, -0.598387, 0.827499, 2.470267, -3.620100, 2.557060, -3.704692, 3.047684, 3.675898, 6.186685, -0.115018, 2.624267, -5.155997, -4.354460, -1.568762, -0.709346, -1.468610, 0.401905, -2.583840, -0.987820, 0.275011, -4.163403, -0.613211, -2.807535, -0.424523, -0.452741, -2.167123, 5.778756, -0.681994, -2.429863, 0.223221, 0.201511, -0.543092, 2.036425, -2.328058, -0.447419, -1.194997, -1.101472, 2.362241, -0.523458, 1.641637, 4.413494, -3.409977, -5.536853, 6.102106, 5.024897, -5.290074, 0.052775, 2.071835, 0.582469, -1.089358, 3.547235, 2.933052, 3.950668, -2.233588, -1.770502, 0.913889, -0.373818, -2.158972, -5.722299, -0.515570, 3.150634, -3.374096, -0.672185, -2.349066, 0.423277, 4.680716, -1.405044, 0.149807, -2.194840, 0.511928, 5.358116, 1.277793, -0.073511, -0.607695, 1.210479, 3.695651, -1.760374, -1.375394, 0.694058, -2.702893, 1.123261, -0.752893, -0.721043, -0.008803, 5.108900, 3.010632, -3.726272, 2.137026, 1.155603, -4.815225, 1.123165, -4.499853, -2.871032, -2.869565, 1.640056, 2.883875, -2.753560, 2.374418, -0.525743, 0.754760, -0.191842, -0.728661, 3.039357, 0.417412, -0.310582, -1.083713, 5.632096, 1.864744, 0.754898, 0.392937, -0.536663, 0.609380, -2.309850, 2.671967, -1.097407, -3.362546, 0.614686, 0.498423, 1.612521, -2.590021, 0.955710, -2.411512, 0.386386, -5.251279, -7.841381, -0.590958, -2.363035, 
-0.070098, 4.583270, 1.687753, -5.833133, -4.610497, 1.243317, 0.787212, 1.516608, 1.190986, -4.180897, 2.236767, -1.813234, 1.337824, -1.599516, -3.677811, -26.237354, -1.201527, -0.976645, 3.695266, -3.046397, -0.612185, -0.403214, -6.037153, 3.486489, 1.110454, -2.614597, -1.813726, 3.819050, -3.899067, -2.189036, -1.128309, 4.495033, -0.690091, 1.605721, -4.417164, -2.698482, 3.989815, 6.847924, -2.491783, -2.521626, -0.033084, 8.455603, -3.503982, 3.065771, 0.794108, -1.092266, 4.623037, -2.361975, -3.687104, 2.658545, -4.387690, 0.838877, 0.090506, 0.399950, 1.262949, -2.685220, 4.460542, -1.484809, 1.970316, -1.609233, -4.424563, 0.721127, 1.493592, 1.255998, -3.487755, -3.881172, 0.427922, 2.569118, 0.065877, 2.816322, 1.152758, -0.501664, -0.407259, 1.829424, -1.862841, 2.326450, 1.251915, -0.890167, 8.041068, -0.464253, -5.633308, 0.973143, -1.270512, -1.123637, 0.414263, 2.355236, 5.072489, -4.236382, -3.017046, -5.539206, 0.427342, -2.033043, -3.006998, -2.034348, 2.991414, -0.888073, -0.284665, 0.764979, -0.311018, -30.389788, 0.295668, 2.223618, -4.418258, -0.228846, -0.868055, -0.286201, 1.586053, 2.322705, -2.874192, -3.909887, -2.716074, -0.632933, -1.315264, -1.857809, 2.162390, 0.581108, 4.548195, 3.339464, 3.113324, 0.260880, 2.201258, 1.306216, 4.046004, 0.226894, -0.154268, 3.058372, 1.441418, 2.402032, 1.295249, -2.624159, 6.831582, -1.634226, 0.016093, 1.380820, -1.251443, -6.686980, -1.877969, -1.170521, -0.148758, -3.604551, 4.608958, 3.159536, 6.339041, 1.586969, -3.169895, -1.270715, -2.643064, 1.292438, 1.746339, -1.821995, -4.091353, -3.314147, -1.255694, 6.793447, -5.778502, 1.311826, -3.195199, -1.776936, -1.542572, -1.340883, -0.471980, 12.039534, 1.598309, -3.289486, 2.143849, 7.559578, -1.072683, 2.660873, 0.178650, -0.823664, -1.195172, -5.483694, -2.724475, -1.046455, -2.970822, 2.370901, 1.326905, 2.699673, 4.745267, 3.315630, 1.795283, 4.534509, -1.104437, -2.619473, 2.310451, -0.207106, -1.756807, 1.944235, -7.406075, 2.360746, 1.695488, -1.573009, 5.868517, 0.632977, -1.449851, -2.978994, -5.217463, -3.489958, -1.462943, 3.673938, -3.456507, -3.669782, 6.829182, -0.014763, 0.705192, 4.692063, -0.884456, -0.521573, 3.395014, -0.574015, 1.318436, 2.451267, 2.462721, 0.067645, -4.006672, -5.101906, 1.233049, 1.874316, 0.475919, 0.348529, 2.598935, 3.047530, 3.175739, 1.714644, 2.201727, -2.770246, -3.782538, 2.618452, -0.984300, 2.884197, -1.390444, -1.016022, 4.748641, 5.434885, 2.662480, 1.714454, 4.043160, -2.636191, -4.603223, 2.327946, -3.645967, 7.918791, -2.157629, -2.806252, 1.060982, 3.452957, 2.139525, -2.042349, 0.693860, -3.820629, -4.664983, 0.846234, 1.978720, -1.999649, 0.705241, 0.508306, -4.941313, -0.472151, 0.620729, -0.734521, -7.761682, 1.561712, 5.253155, 2.527959, -4.672490, -0.972164, 4.680885, 1.364908, -3.930770, -2.790518, 0.518415, -4.114025, -1.543985, 1.640930, 2.419094, 0.260200, -1.186129, -2.653908, -4.087616, 1.915277, 3.987131, -1.384313, -2.631912, -1.384115, -1.071472, -2.785765, -2.181538, -2.885599, -0.401276, -6.260543, -3.641433, -0.454064, 3.221514, 5.539521, -7.809373, 2.057474, 3.860055, -0.889300, -4.335176, -6.850449, -0.349884, 0.461714, -2.370055, 2.600246, 3.713120, -1.530666, 4.533460, -5.503286, -1.099606, 1.367768, -1.353006, 0.527998, 0.605875, 3.025352, -2.243968, -7.535481, -3.645490, -9.158818, -2.249825, 1.013910, -1.286187, -1.565031, 1.689706, 4.758728, -3.682729, 2.832422, 0.759288, 3.222937, 0.980088, 7.682557, -0.236019, -1.305568, 5.229431, -0.028067, 3.350055, -6.278828, -6.820782, 
-5.137558, -1.602286, -4.789843, 1.372694, 7.766475, -0.262632, -2.088757, 2.054604, -0.873996, -3.777221, 1.057147, -1.788633, 0.636675, -3.246207, 4.191454, 10.845826, 2.184264, -0.663242, -2.994276, -3.822319, 2.048127, -2.415215, -5.284320, 0.379661, 0.770331, -2.962476, -9.441674, -1.733733, 0.313156, -7.212578, -1.284574, -0.594387, 1.904230, -2.360615, 0.960972, -0.756241, 5.057050, -3.848195, -0.850209, 1.989755, -1.932005, -3.434974, 1.126740, 0.189969, -5.973492, 0.497733, 0.502772, 0.983130, 1.428358, -0.880303, 1.468601, 4.044591, 4.903339, 6.451278, 2.301094, 1.285675, -6.867669, -3.877277, 5.125129, 0.206669, -0.732105, 1.676614, 1.967732, -3.292951, -2.084330, -2.661968, 5.956147, 2.431339, 35.113945, -2.903124, -5.854738, 1.690528, 0.256790, 1.409627, 0.245409, -0.751060, -2.545308, 1.832971, 2.343396, 0.981405, 0.298588, 2.361303, 2.873193, 3.184145, -0.578079, 7.647470, 1.286948, 2.470084, 2.074987, -3.704046, 2.094772, 1.427298, -2.048329, -0.852479, -0.147512, 8.952838, -1.212160, -0.013429, 3.119918, 3.027362, -1.793503, 1.816219, -4.171658, -1.222634, 1.598949, -0.425474, -4.008621, -0.180907, 1.435951, 4.180890, -2.889360, -0.643184, 2.107976, -2.184762, -6.804729, -1.313931, 6.336479, -2.715487, -8.017049, 1.290528, -1.088894, 1.836897, -2.769671, 2.145877, -3.002848, 1.399926, 3.115015, -0.575049, 3.000286, 1.236908, -0.665988, -0.811098, -1.520874, -0.406334, 0.081887, -2.804508, 1.811796, -1.327266, 3.629619, -3.051250, 5.777524, 5.625944, -0.074724, 2.295986, 4.644068, -0.943301, 0.499157, 2.743801, 0.288521, -4.547318, 1.480684, -1.903996, 0.755657, 6.564864, 5.294844, -7.152912, -4.054803, 0.867254, -2.116263, -1.725379, -0.237372, -1.343886, -1.357473, 4.509637, 2.103033, -0.916625, -0.122734, -0.358285, 4.174424, -5.610752, 9.703909, -3.241749, 4.983339, -3.434541, -0.940305, -2.995391, 0.774271, -3.800056, -0.785292, -6.434474, -3.552718, 2.490310, -3.969891, -0.159337, 1.611900, 0.692022, -3.616428, -0.751369, -1.386437, -6.936826, 2.289046, 0.809470, 7.176322, -5.247258, 0.099220, 2.229184, 2.014560, 0.463515, 24.263531, 7.516344, -3.366016, -1.453768, 1.082829, -4.192920, 1.545274, -8.204566, -1.746497, -0.660806, -0.428469, 3.773864, -2.924908, 5.075720, 0.441147, -2.315868, 0.052775, -2.973179, -0.977929, 7.137644, 0.139646, -2.662629, -4.508866, 0.448624, -4.459792, 6.988142, 3.280386, 2.495726, 1.759314, 1.869679, -1.649343, -3.923743, -1.916861, -1.756116, -1.214166, -0.954839, 2.016791, -0.236211, -4.287428, 1.482038, -0.924025, 1.470933, 0.904945, 0.108155, 0.674055, -10.751386, -4.172389, 4.201840, 4.954018, 0.124576, -1.535858, -1.969235, -0.285039, 2.575959, -4.651704, -2.613804, 2.937226, -1.160048, 5.015957, 5.243375, -1.127164, -4.034823, -2.547338, 5.067166, 0.726256, 1.335407, 4.330610, 2.297170, 2.085455, 0.371684, -2.918300, 1.720158, -7.416587, 1.883161, 2.180356, -3.423670, 3.888726, -2.135635, -1.600958, -5.953264, -39.905117, 8.047747, -0.590927, -12.720160, -1.936969, -3.425652, 4.325792, -4.519634, -2.195782, 4.614402, 2.050029, -4.219336, 10.337938, -4.975234, -0.212531, -3.067214, -0.317205, 3.844498, 0.520008, -4.234404, -0.187393, -0.426870, 5.068228, 0.575723, 2.555439, 2.293116, 6.937310, 3.229408, -0.087549, -2.854950, -4.198129, -3.142676, 2.358586, 1.574875, 0.238579, -5.262257, 1.197025, -0.675573, 1.292409, -1.297141, 0.787200, -2.647539, 1.293134, -0.284689, -1.265714, -1.683593, 3.952523, -5.629538, -0.177936, 4.821568, 0.468734, 1.553631, -22.561216, -3.194087, 0.641658, 0.647794, 4.655758, 8.161758, 1.088832, 
2.065722, -5.100087, 4.852927, -0.296413, -1.453670, 3.208039, -0.851521, -3.539480, 0.542105, 1.586682, 7.371803, 0.131287, 3.124378, 1.897563, -2.975857, 2.867676, -0.768058, 2.120100, -0.031692, 0.476925, -1.876388, -4.271606, -3.991953, 3.527751, -2.635496, -3.599462, -5.120028, -6.221442, 2.327266, 0.809617, -0.387147, 2.548201, -1.121036, -3.484795, -1.402988, 2.415949, 3.418981, 0.417257, 0.903939, 1.734160, -2.755414, -6.037165, 3.559899, -5.386902, 4.435638, -1.071356, 2.335263, -6.863605, -3.746332, -4.198571, 1.714893, 2.001045, -0.438175, -2.384535, 6.253733, -6.634218, -2.326913, -3.920566, 1.712523, 7.929015, -2.293629, -3.360520, -1.844465, -3.437568, 3.067866, 0.948846, -2.291683, 0.762849, 2.465610, 0.906762, 0.129689, 0.416989, -0.638576, 3.360513, 3.763278, -4.966182, -1.986309, 3.747531, -1.232796, 1.630115, -6.572279, -7.473110, 4.103313, 0.916506, -5.498175, -1.645774, -0.651836, -1.021878, -1.871121, 2.433188, -3.028378, -2.300551, 2.270210, -3.892125, -6.832912, 0.811403, -0.145563, -4.302457, 4.773832, 3.033714, -1.877802, 2.448671, 0.271501, 1.455012, -0.938483, -1.683572, -1.313281, -0.491662, -3.128110, -1.350108, 2.448845, 1.547712, 1.351034, -1.801391, 2.674956, -1.573676, -1.242364, 0.999008, 1.600701, 1.566497, -0.057011, 3.053138, -1.975653, -2.304484, 0.093347, -2.296474, 1.850713, -0.179993, 5.420606, -2.297353, 3.769445, 2.210748, -2.572461, -1.740291, -0.005308, 4.902779, -1.297032, 0.609557, -1.101383, 72.558342, -0.088271, 3.113629, 206.012009, -3.240754, 1.021218, 1.958277, 1.962433, 2.951267, 2.812891, -2.298386, -3.912461, 1.429459, 1.525793, -0.895539, 3.264507, -1.131779, -3.443517, 2.119965, 2.405210, -1.633876, -4.738081, 2.565144, 1.576541, -3.236324, 0.407250, 0.994346, 13.261386, 4.742578, 1.851185, 4.192485, -1.279148, 5.030614, -7.137955, -0.224859, -1.275927, 1.304742, -1.494409, 2.443050, 0.169893, 0.740986, -3.160841, 5.052903, 2.456794, -2.542618, 3.678230, 0.542461, -7.071412, -0.258902, 0.908643, -4.486769, -4.286944, 2.765028, -1.253897, -5.738708, -0.480136, -3.865167, 2.628675, -3.531616, 1.489231, 1.834054, -4.295033, 0.548741, 2.876899, -4.153857, -1.585355, -0.407849, -1.639800, -2.019001, 0.013635, -3.680586, -2.273016, -3.334958, -4.988601, -2.093794, 4.899792, 0.107494, -0.564911, 2.926087, 2.974469, -4.776313, 2.406165, -4.207486, -3.598234, 1.092422, -4.990727, 1.851739, -0.836149, -3.069391, 1.130893, 4.755144, -2.043385, 7.117757, -0.450315, 1.329819, -3.382784, 0.312315, 1.380183, 2.948128, 0.843334, -0.101508, -1.195744, -1.218848, 4.023391, -8.832312, -3.381735, 1.129437, -1.167355, 5.336382, 2.976506, 5.820520, 1.737769, -4.227187, -2.067090, 1.075595, 10.285301, -1.987714, 0.394144, -2.892544, -2.382363, 1.020431, 1.569579, -1.949997, 2.014963, -1.564843, 2.930238, -17.482843, 4.599751, -1.369918, -1.032338, 1.396836, -4.724452, -1.896716, 0.230207, -0.495559, -1.834428, -3.406779, -0.617727, 3.613106, -4.981169, 4.388629, 2.516419, -6.392951, 2.072604, 1.078797, -4.091152, 0.443447, 0.429835, 1.048671, 18.735947, -6.367274, -1.449311, 2.331825, 0.894631, -5.131912, 0.580041, -0.850144, 7.598893, 1.336362, 1.858220, -2.481395, 2.120193, 1.216430, 3.656274, 1.865811, -1.271263, 1.636675, 6.872675, -1.059788, -2.045355, 0.609584, -3.676151, -4.326444, -0.923013, -2.306735, 2.047832, 0.150752, 2.024965, -0.756336, -0.620774, 1.030856, 0.376920, 0.553986, -1.883437, -1.971918, 1.731024, 2.353764, -6.111332, -0.304377, 1.470879, 5.560238, -2.615870, -2.650940, 1.433940, -0.051230, -1.967335, -7.607589, -1.797252, 
1.446501, 2.473147, 3.638485, -0.219974, 4.721615, 3.223171, -6.518932, -4.603350, 0.715677, 6.245849, 1.733042, 3.131284, 0.325963, 5.154425, -4.297323, -0.021415, -2.278406, -2.710562, 2.181804, 2.193472, -1.018878, -1.759673, 1.366682, -5.043843, -2.841945, -0.935735, 1.922798, -0.862821, 2.772899, -3.891979, -3.744489, 2.147132, 0.569339, -5.099829, 0.005366, 2.357354, -3.178867, 0.264880, -0.851968, 0.595377, -2.278501, 4.848483, 0.118224, -4.583533, -0.214037, 2.697597, -1.440357, -1.666116, 0.851293, 0.883211, 1.024516, -0.896234, -1.503313, 4.888958, 1.611460, 3.238015, -3.881551, -1.781527, -0.476086, -0.982325, 2.661318, -6.942019, -1.394752, -0.423972, 2.271741, 0.152605, 0.238435, 2.724885, -0.393306, -1.051537, -0.960650, -0.861564, 2.063864, -3.151954, -1.856677, -0.298613, 3.787447, 3.413723, -1.369631, -6.394962, -0.264581, 0.143311, 0.867922, 0.935386, -3.864798, -4.037965, 2.477060, -5.491480, -2.805309, 5.783478, 0.803354, 0.520973, -0.135130, 1.370257, 1.149127, 5.635244, 1.921973, 94.129715, 2.089229, 3.259336, -0.143102, 1.138054, 2.748802, -0.038475, -0.070011, -0.762176, 2.708007, 4.141964, -3.584219, -0.032396, 3.259036, -0.025447, 0.818326, 1.917706, -1.155905, 8.985096, -2.741974, -1.218162, 4.030367, -1.527926, 1.365472, -0.251965, 0.770671, 8.414336, 2.682959, -1.959863, 4.116392, 1.393741, 1.232316, 1.978609, 1.709247, -0.502165, -3.071361, -0.324096, -1.026302, -6.424647, 0.446499, 0.213769, 3.803774, 6.658650, 6.602856, -1.974688, 2.762466, -2.043093, 0.879476, 0.919183, -0.247929, -0.853931, -3.744264, 2.538427, -0.737557, 2.740392, 3.303057, -3.512236, 3.102843, 5.220832, 0.372342, -1.047339, 4.278565, 1.036786, 1.029258, 5.107583, -18.135500, -1.597473, -1.459437, -1.424137, 0.107285, -1.398612, -1.167413, 2.997065, -2.405479, -1.376471, -0.230739, -3.623855, -1.596153, -2.854612, -2.279795, 1.908002, 4.536153, -4.411760, -3.900412, -2.842536, -0.056337, -0.574373, 3.723182, 2.061563, 1.997676, 1.559551, 1.319488, 0.216698, 4.750317, -1.264838, 2.076683, -3.207131, 0.603287, 2.815248, -4.284519, 2.447892, 2.313815, -0.990933, 1.550352, -2.597172, 5.693384, -2.378237, -0.355189, -9.217489, 0.986259, 2.050986, -0.734036, 0.513840, -1.256083, 1.010584, -3.890242, 3.109879, 2.664329, 0.235304, -1.298546, 5.845055, -0.618577, -1.712930, -2.466341, -8.814952, -2.423369, -3.853379, -2.330503, 1.720881, -2.549619, -0.917371, 0.750732, -4.258095, 4.545292, 16.827070, 4.341411, -4.160776, 7.871614, -1.460008, 0.916908, -2.542533, -1.021814, -2.370642, -1.075275, -1.789817, 3.117935, -1.870062, -5.916857, -0.094055, -0.352700, -5.319837, -35.927345, -0.691074, -4.044396, -2.477181, -5.232465, -0.339948, -1.177866, 0.845809, 1.695227, 1.008889, 2.832309, 2.738217, 1.445063, 2.312360, 1.750933, 0.846054, -0.581590, -4.028019, 2.292118, -3.794464, 2.565845, -0.122276, -3.266358, -3.763474, 1.849701, -4.350887, 0.039514, -3.265985, 5.101483, -0.556162, 0.564820, -6.121221, 6.639935, -1.289618, -2.559231, 1.406619, 4.442874, 0.319820, 0.726050, -2.547207, -2.503426, -3.586128, -3.681639, 3.263022, 5.335816, 8.684458, -6.611440, 1.283098, 0.481837, -5.526846, -2.499801, 1.207610, -1.121897, -2.016232, 12.473808, 2.213367, 7.477863, 0.911727, -0.473529, -5.328060, 1.499891, 2.401043, -2.129521, -3.428266, -2.606005, -4.792294, 1.989314, 1.814506, 0.835620, -2.496084, 38.477718, -1.623340, 4.445875, -0.332153, 5.350834, -4.440343, 1.359097, -3.471869, -0.278807, -1.668480, -0.941456, 0.021951, -6.308822, 0.888352, 1.880654, -0.754093, 0.778417, 0.146554, -3.647994, 
-2.967704, 4.578904, -2.534475, 4.675762, 0.427737, 4.265275, 0.919006, -2.431893, -1.910403, -1.926558, 1.929821, -3.158678, -1.706776, 1.820189, -4.834085, 1.676722, -1.890408, -1.759340, -1.433258, 4.960869, 0.075965, 0.177702, -1.349815, 5.169975, 2.521596, -1.891266, -0.400549, -2.465737, 1.221569, -0.392953, 6.994193, -6.160126, -0.975779, -3.955857, -0.170822, -1.156463, -3.122205, 4.754805, 2.628625, -1.413216, -3.647281, 3.438949, -2.969725, 2.559430, -1.363331, 1.760466, -0.156625, 1.011131, -4.567613, 3.126194, -0.404693, -1.913285, 1.964376, 2.013246, 1.856814, 1.823909, -2.459622, -2.986393, -2.112360, -3.993928, -4.515164, 3.671026, 4.975029, -2.019345, -0.476565, -4.061290, -3.628844, -5.407526, 0.859315, -0.503966, -2.052165, 4.601514, -4.284617, 3.929847, 3.930277, 2.817606, 4.260955, 0.729660, 3.674749, -1.223544, -3.924620, -10.879255, 5.591162, -0.591958, -2.760924, -5.799543, 0.370892, 7.387153, 3.102391, 1.636241, 2.026492, 4.056646, -3.243164, 1.223184, 3.922287, -0.906492, -0.105583, -1.605257, -3.096182, -5.482349, -2.640560, 1.742104, 0.383764, -4.236398, 0.498601, -3.024589, -0.461563, 4.091776, 0.207427, 1.422462, 0.358810, 0.666352, -4.161722, -2.019466, 0.057862, -2.731038, 1.107358, 1.186801, 3.940192, 1.199561, 2.998346, -4.315383, -5.012862, 2.053609, 1.661753, -3.678882, 0.673520, -4.182108, -0.535980, -1.416115, -2.901843, 1.458763, -3.259208, -0.166907, -15.877787, 4.388753, -0.407753, 4.388493, 1.758415, -5.531496, 0.706816, 5.436209, 0.724395, -3.395930, -0.075715, 0.524230, -0.771542, 2.852838, 0.332570, 2.345337, -2.167099, -1.282536, -0.774624, -0.220040, 2.654893, -2.342539, 5.738266, 1.717345, 0.186199, 2.949594, -0.056166, -1.494538, 3.921188, 5.117081, -2.492425, -3.140533, 0.325797, 4.341228, -3.385640, 0.344608, -2.186913, 1.999470, 6.138517, 1.785546, -1.332933, 0.677446, -0.680150, 1.780975, -2.377671, 2.068444, 1.805536, -3.477414, 0.581456, 15.761013, 1.452620, -2.738906, 4.598837, 1.583359, -2.947290, 4.835289, -0.983193, -2.914901, 0.862585, -2.631010, 0.329822, 1.898617, 0.493772, -1.386106, -2.557891, 3.673988, 1.138950, -1.672374, -0.077062, -4.477095, -0.588584, 2.087775, -0.897031, -5.305494, 3.123887, -4.255782, 1.186395, 2.710909, 0.683406, 3.008036, -0.048826, 0.120073, 4.015583, 2.089087, -2.089996, 0.725893, -1.093260, -1.587236, 1.879516, -1.272824, 1.640219, -5.574955, 0.845441, -4.265052, 3.373461, 4.076551, -3.102997, 0.358034, 0.123620, 3.719684, 1.690073, -1.284312, 0.198106, 3.568200, 1.303581, 3.197544, 0.379994, 3.425193, -4.718246, -3.075102, -1.404393, 2.956470, 0.613643, -3.138955, 4.275066, -1.796808, -0.584166, 0.725551, -0.014106, 0.002392, -0.029861, 5.503566, 2.093366, 2.121698, 2.123091, -3.677660, -0.848140, -0.462519, -5.223706, -1.811343, 1.634526, 2.982064, 3.992223, 1.853437, -0.709607, 3.809213, 2.474880, 1.395476, 0.957667, -3.412217, -4.581048, 0.840215, -0.727646, 0.040299, -2.873080, -5.684605, -1.440983, -0.104712, -1.534961, -0.425050, 0.696437, 0.796181, -7.457066, 4.581358, -0.555860, -0.574814, 1.785043, -0.783685, -2.932360, -2.846819, -1.537817, 0.805327, -1.850628, 0.171768, 0.823735, 1.831761, 1.635365, 8.369469, -8.393724, 2.998655, 2.864546, -5.140690, -2.487573, 1.948721, -6.674129, -0.333719, 5.916525, -1.243209, -1.976587, 1.527990, 2.096503, 0.127279, -8.390700, -4.807900, -2.087375, 3.248238, 0.314544, 5.246479, 2.012801, -2.588027, -0.426925, 3.381694, -3.283866, 2.181428, -0.179422, -1.505057, 1.304264, 5.389224, 4.507925, 4.740923, -6.174764, 0.920102, 5.463493, -3.661193, 
-8.281869, -1.901932, 4.642286, -0.482367, 0.000062, -1.021018, 3.470250, 0.845180, 1.008148, 4.595012, -2.994130, 1.620082, 0.685992, -2.699721, -2.641316, -3.471750, 6.495037, -1.260840, 1.917363, -1.188297, -1.971188, -1.512081, 3.544799, -1.659882, -0.570012, -1.171403, -2.391776, -0.243539, -3.987116, -1.305047, -1.665626, -3.211532, -3.930502, -2.119045],
+	"gemma2:latest":        [-0.612804, -2.215562, 0.792132, -0.651482, -2.400651, 0.047423, -0.096681, 1.335882, 0.176972, 0.849561, 1.180600, -0.659039, -0.453180, 0.075028, -1.174218, -2.576799, -0.433781, 0.657639, -0.178257, 1.154732, -1.243714, 1.093774, 1.291275, -1.545106, 1.221159, -0.511055, 1.819469, -2.283442, 1.198886, -0.448073, -1.150142, -0.796729, 0.319335, 0.128334, -3.369398, 0.529417, 0.001437, -0.841971, 2.942720, -1.803343, -0.292857, -0.785087, -3.741514, 0.917414, -2.835000, -0.515251, -1.833111, 2.204731, -1.536004, 2.052294, 1.424947, 1.128338, -0.868599, -1.878431, 0.746458, 0.305212, -1.198236, -0.216649, 0.096267, 0.374555, -0.088862, 2.874118, -1.643357, -0.156423, 2.514740, 0.331463, 1.577502, 2.079631, -0.169292, 1.362248, 2.312044, 2.920094, 1.592291, -1.020943, 0.824854, -1.844501, 1.401171, 1.337021, -1.520441, -0.597406, 0.476235, -1.770105, -1.775778, -1.380674, -0.423943, -0.135961, -2.185649, 1.998813, -1.881614, -2.436793, -2.301215, -1.802746, -1.508670, 2.766745, -0.379725, 0.447800, -1.477152, 0.246232, -1.762736, -0.226605, 1.478358, -0.281220, -0.200569, 0.942512, 0.680811, 0.564003, 0.730233, -0.543805, -1.819254, 1.064668, -0.735063, 0.515173, -1.019077, 0.219433, -0.205558, 4.559363, -0.043007, 0.875995, -0.014103, 1.605997, -0.083794, 2.355215, -2.294942, 0.451085, 5.738412, -2.193937, 2.306627, -0.206797, -0.429043, 1.211079, 0.743677, 0.486967, -0.604554, 3.979926, -1.719997, -0.458243, -1.574947, 1.082482, -1.041721, 2.636800, 0.249583, 0.375513, 1.986354, -1.146480, -0.135895, 0.425603, 0.235258, -2.052300, 0.907751, 0.968922, 0.058095, 0.114366, -0.043495, 1.277248, 6.483394, -1.122919, -0.901037, -0.529589, -0.472289, 0.045193, 0.137343, 0.339210, -1.675540, 0.323157, -0.596973, 2.038504, -0.258320, -0.723805, 0.649060, -0.376432, -0.666743, -0.516852, -1.214601, 1.561316, -0.097161, -1.401292, -1.235775, 0.696198, -0.013219, -0.327094, 0.388949, 0.851818, -0.798742, 1.672736, -0.357125, 0.195319, -0.968957, 1.882132, -2.164159, 0.966465, 1.970838, 1.228401, 1.307016, -0.780354, -2.326900, -0.253378, 0.554909, -0.015867, -1.282054, 4.145304, 1.606526, -1.657055, -0.323419, 2.354091, -0.137715, -1.177234, 0.135134, -0.850678, -1.318437, 1.429064, 0.823354, -1.856108, 1.066734, -2.022635, -1.705907, -1.349212, 2.103198, -0.661027, -0.516363, -1.717856, -2.184409, 0.591786, -0.810128, -0.042653, -0.053300, -0.578977, -2.760526, 0.647994, 0.101502, -0.755300, 3.488568, 0.945688, -2.934568, -0.603232, -2.520559, 0.025086, 4.840297, 3.097736, 0.784692, 2.397304, 0.305680, -2.447023, -2.189225, -0.969430, 1.493920, 1.075302, -0.067036, 1.552955, -0.465664, -1.123212, -0.728675, -0.749297, 0.706532, -1.319413, -1.125295, 1.821698, 0.137079, -1.636830, -0.331892, -1.192912, 0.536431, 0.858999, -1.610003, -1.506221, 2.193912, 0.672625, 0.448160, -0.996383, -1.689102, -0.126877, 5.714808, -1.373709, 0.686600, 0.351253, -0.133848, 1.320625, -0.884176, 1.701358, 0.211984, -1.276537, -0.437960, -0.997288, 1.922952, -0.103601, -0.280217, -3.679128, 0.637149, -1.101487, 2.051099, 1.635809, -0.314859, 2.362195, 2.349697, -1.089012, 2.470410, 0.708995, -0.304216, 2.844971, 0.791556, -0.444637, 0.558225, 0.479242, 0.974241, 2.825071, 1.306728, 0.078313, 0.698186, 2.259185, 2.549525, -0.517508, 1.916198, 0.455696, 1.226566, -1.129225, 0.448858, -0.044081, -0.379127, 1.051160, -2.174675, -0.091759, 0.735747, 2.523257, -0.902130, -1.711550, -0.054244, 1.433700, 0.505774, 1.657618, 0.392086, 0.526231, 1.710216, 0.001930, 0.739513, -1.474645, 1.431480, 
-4.844274, 0.771516, 8.317552, 0.171193, -0.780930, 1.585272, -2.259208, 0.890983, -1.644048, 0.793156, -1.216816, -1.476947, -1.916819, -0.577669, 0.435558, 0.042042, 1.585200, -3.304183, 0.816374, 1.643369, 2.185262, -4.168428, -1.474942, -1.652446, 1.270458, -2.830683, 2.015999, -0.219767, -0.819199, 0.177299, -0.177188, 8.587397, 0.417928, 1.846660, 0.958334, -0.446011, -1.990049, 1.178265, 1.220160, -0.392036, 1.025705, -2.985824, 0.119768, -3.222000, 1.787709, -0.567594, -0.899789, -0.806125, -1.066476, -0.087982, -0.146052, -1.054549, 0.050203, 0.174088, -0.530262, -0.432011, -0.981290, -2.461792, 0.434536, 0.645516, -2.864508, -0.226271, 0.076930, 0.691487, -1.207616, -0.840917, 0.031664, -0.244416, -0.005431, -1.305514, 1.818595, 1.123752, -1.502631, -0.217228, 1.076120, -3.123597, -2.397233, 0.748559, 0.946289, -1.433505, -1.423255, 2.287119, -1.483384, -0.653202, -1.123917, -3.034894, -0.068661, 1.750771, 3.814820, -0.543904, -2.120688, 0.588598, -1.713928, -1.987432, 0.561580, -1.330059, 1.559782, 0.092751, 2.162706, -1.395874, -1.907609, 0.188079, 1.494886, -1.341706, -1.462309, -1.755446, -0.664524, -0.508806, 1.211292, 1.618556, -1.989684, 1.365123, -3.682937, 1.727478, 4.240307, -0.660610, -1.706325, 0.999625, 0.635882, -0.853538, -0.408396, -0.089295, -0.027076, -3.028868, -0.055628, 1.818635, -0.340091, -0.737781, -2.827969, 0.061214, -3.141003, 1.612542, -0.221340, -0.929781, -0.728779, 2.008737, 1.408174, -0.920326, -0.653913, -3.041245, 4.198066, 0.342775, -0.846428, 2.108423, -4.409381, 0.387547, -1.371905, 0.054314, 1.987593, -0.481697, 1.530847, -2.225521, 0.796091, 0.499600, 0.113369, 1.249683, -2.361308, 0.537096, 0.347104, 0.889500, -2.390798, 0.981587, -0.782676, -0.177560, -1.397803, 2.599752, 0.272461, 0.764350, -1.675289, -6.649379, 1.849518, 0.405324, 1.632783, -0.265679, -0.399341, 3.205031, 1.300731, 0.026923, 0.748761, -0.405800, -0.083681, -0.470509, -0.623445, -1.020196, 2.729735, -1.121918, -0.092062, -1.711041, -0.705481, 2.327773, -3.161011, -0.284439, -1.496094, 0.119383, 1.386817, -0.770567, 2.041225, -0.222127, -4.341688, 1.785259, -0.743285, 0.297729, 2.546512, 0.138978, -1.180040, 1.914775, 1.235418, 1.610988, -0.544882, -3.502224, -0.664578, 2.622726, -1.956743, 0.572299, -0.548901, -1.306587, -0.785378, 0.914924, 0.783605, 0.050168, 0.140696, -0.740745, -0.447390, 2.542762, 0.579260, 2.657335, -0.486435, 1.883910, -0.114313, -1.502129, 1.014179, 0.511371, -1.522515, -0.871545, -2.773058, 0.282133, -26.943012, 1.947267, 0.995500, -0.375220, -0.263242, 1.126674, 0.268782, -0.826598, 0.771542, 0.601364, -1.670286, -0.794151, -1.052049, 1.664418, -1.771948, -1.081269, 3.088852, -3.009343, -0.591919, 0.359618, -1.442897, 1.283059, -1.601421, 0.059713, -1.544779, 3.660547, -0.505520, -0.916645, 0.456173, 0.726471, -1.988427, -2.519803, -0.745822, -2.397226, -1.916228, -0.335790, 0.921118, -0.033017, -0.358666, 2.200879, -2.152330, 0.277213, 0.005560, 0.110227, -0.299637, -1.675720, -1.592974, 1.257089, -0.001712, -2.691953, 1.374281, -0.238531, 0.113042, 0.792293, 2.955853, 0.360843, -0.801828, 0.655462, -0.418724, 0.435561, -1.148878, 3.189636, 0.903138, -1.041226, 0.955873, 0.062271, 0.814551, 3.057136, -0.883728, 2.508814, -0.221000, 1.197242, -2.853786, 0.273554, 1.465235, -0.667575, -0.914718, 0.315029, -0.234952, 0.899639, -2.900770, 10.891487, 1.999561, -0.751676, 1.435429, -0.009011, -0.587682, 2.481852, 0.600284, 0.532765, -0.083498, -1.631364, -0.921569, -0.740134, 0.913858, 1.653027, 2.440396, 0.355527, 1.974421, 1.495195, 1.609850, 
-0.556871, -2.574747, -0.148762, -0.081312, -1.779047, -1.183734, -3.157973, 1.748138, 0.045026, 2.432301, -1.731196, 2.810618, -3.697318, 1.252985, 3.420933, -3.625082, -3.448531, -1.449746, 1.944590, 1.515184, 1.475356, -1.926787, 0.662388, 0.312225, -0.247869, -1.336650, 1.309450, 1.289840, -1.791841, 2.233339, 1.110332, 1.409653, -0.755719, -2.309618, 4.158283, -1.370105, -1.869566, 1.757159, 1.965715, 1.056489, 0.521358, 1.980252, -0.349643, -1.611036, -0.729387, 1.155615, 0.988922, -0.361319, 1.082129, 0.068815, -0.772796, 1.792016, -0.615467, 1.128533, 0.463058, -1.345695, 3.185494, -1.301041, -1.996305, 0.665943, 1.273877, 1.243384, 2.585500, 0.488780, 0.761487, -0.218512, -0.334899, -2.600352, -0.445336, -1.509099, -0.626762, -2.004987, -0.558542, -0.612695, 0.547286, 4.910222, 0.287499, -1.193723, -2.562821, -1.388086, 1.646432, 1.326577, 1.111498, 0.203925, -1.517875, -3.704810, -1.093996, -2.021580, -1.916675, -0.515574, -1.984882, 10.937584, 1.183303, -0.082892, -1.700246, 1.597455, -1.050352, -0.636202, 0.016098, 1.592791, 0.928570, 3.135559, -0.550963, -3.442023, 0.400463, 0.729963, 1.314300, -2.109090, -2.259105, -2.970544, 0.484482, 1.226580, 1.740525, 1.844398, -1.912322, 1.129187, 1.192271, 0.424892, -0.787637, 0.865984, 0.864021, 1.354559, -0.063926, -0.434077, -0.012006, 2.852866, 0.389863, -0.533737, 3.148972, 1.060689, 1.866251, 1.682504, -0.012799, -0.573761, 1.548298, -1.396003, 0.405621, -2.820714, -2.699893, -0.464518, 0.939136, 1.039604, -1.127164, 0.015904, -0.974856, 3.763283, -1.769051, -2.537759, 1.364036, -2.556907, 0.317924, -0.741344, 0.664915, 0.365745, -1.611049, -4.680942, 0.238727, -1.081296, -0.018157, -0.069627, -0.692939, -1.438060, 0.028996, 0.443964, 0.479003, -1.074597, -0.792180, -1.287570, -0.472070, 1.143060, -1.331187, 0.279843, 1.538364, -0.755012, 1.269352, 0.147700, -1.993458, 2.027213, 1.952567, 0.130689, 0.175577, -1.684089, -2.843986, -1.475662, -1.313473, 0.493337, 0.865873, -1.506814, 0.911230, 4.060201, -0.657823, 2.244580, 0.376390, -0.728367, 0.955567, 1.227401, -1.857505, 2.921495, -0.229956, -0.072513, 0.932423, -0.201619, 0.232971, 1.557939, -0.515363, 0.838789, 1.482790, -1.667081, -0.391275, -0.706493, 1.428702, -2.145041, -0.090843, 1.153879, 2.321257, 0.291841, 2.032717, 1.010728, -0.259707, 1.123002, 1.452603, 0.396588, -1.609307, 2.755708, -1.096434, -1.128434, -0.456643, -1.087247, 1.039796, -0.082646, 0.103324, -2.672513, 0.541997, -0.463013, 0.924998, -1.137337, 1.717439, 1.806631, 2.417997, -2.722397, -0.501058, -1.222010, 1.497933, 0.565792, -2.130436, 0.640929, 2.653448, -1.959907, 0.000840, -0.380516, 0.897905, -2.555002, -1.004683, -0.593022, -2.975895, 1.278403, -0.375218, -0.149386, 2.255414, -1.526381, 0.084594, -3.294236, -1.326490, -0.203012, 0.810917, 0.465711, 0.857732, 0.408031, 0.310734, 1.656951, 0.135821, -1.223429, 1.009970, 0.376411, -1.546335, 3.112256, -1.660400, 1.687610, -1.672755, 0.315959, 1.257159, -0.057022, -1.575822, -0.488777, 2.446592, 2.351879, -0.211651, 2.386171, 0.332149, -2.233296, -1.876446, 0.195934, -1.926995, -1.603126, -3.828640, 0.260395, 0.385531, 1.310519, 1.721206, 0.296907, 1.001025, -1.458293, -3.032516, 1.179443, 4.504854, -1.917630, 0.154737, 0.922710, 0.373369, 0.973977, 0.497264, -1.309954, 0.364134, 0.821203, 3.184717, -0.706484, 1.003727, -2.330777, 0.502921, -0.394737, 2.235528, -4.310393, 0.849241, -0.515099, -1.969563, -0.550761, -2.059675, -1.198608, -1.031207, -2.272766, 0.249279, -0.653474, 0.180790, -1.657860, -0.870616, -0.077415, 1.024495, -3.140102, 
-1.015593, -0.066455, -0.954831, -3.604165, -0.667273, -1.388813, -1.103314, -2.176858, 0.181914, 0.389964, -0.991440, -2.473201, -3.683666, 0.407281, 0.973368, -0.768065, 1.279742, -1.934132, 0.383385, 1.219248, 0.550082, -0.545345, -1.063584, -0.883715, 1.895950, -0.573758, 0.624612, 0.401298, 0.868632, -1.362146, -0.179727, 1.337189, 0.909851, -0.502053, -0.723174, 0.274857, -3.272782, 2.643730, 1.040499, 0.818118, -2.340259, 0.767800, -0.872570, 0.223991, 2.124008, -1.491909, -2.273498, -1.011411, 1.214753, 2.879524, -1.277860, -1.260172, 0.567704, -1.517100, -1.385336, -4.617663, 1.183306, -0.573399, 0.656246, -0.754075, 0.392937, 1.649950, -0.528149, -0.135442, 1.853639, 0.393270, -0.521959, 0.088236, -1.188602, 0.953215, 2.363008, 1.364805, 0.558522, 0.649260, 0.088176, -2.218788, -3.740898, -0.470999, -0.987547, -2.570537, -0.633499, -0.908846, -0.763746, -0.247464, 0.398668, -0.264671, 0.690039, 0.891171, -2.439893, 2.026531, -0.118626, 0.410479, 0.928528, -6.758152, 0.315290, -1.122024, 0.909448, -3.880399, -0.821637, -0.097316, 1.094775, 0.483831, 1.949865, 0.616885, -1.824979, -1.264061, -1.626864, 1.742028, -1.491169, -3.775957, 1.858886, 0.115915, -1.174716, 2.287612, 1.828963, 0.607532, 1.403317, 1.295260, -0.010620, -2.863109, -0.452950, -0.232644, -1.840987, -1.883705, -0.450756, 1.741748, -0.218979, -2.266348, -0.593941, -0.933037, 0.010402, -2.074483, 2.166774, 3.937597, -2.954117, 0.752764, -2.348191, -0.561915, 1.157428, 0.141908, -0.717537, 0.776393, 0.175925, -2.025991, 0.168681, 0.788793, 1.720917, 0.646435, 2.423590, 0.186302, -0.146719, -0.446881, 2.395269, -5.198894, -0.901929, 0.027377, 0.012715, 0.005252, 0.653947, -0.101728, -1.622818, 2.549720, 2.113362, 0.824165, 1.469637, -0.910084, -0.856566, -1.045887, 1.253957, 0.019382, -1.706803, 1.173757, 3.334522, -1.034507, -2.077802, 1.543029, 1.082566, 0.904124, -2.292025, 2.802828, -1.242614, 0.167377, -2.591544, -0.791183, -1.533554, -0.887509, 0.720978, 2.352231, -1.702183, 0.675630, -1.299956, -2.844064, 0.003719, -1.818623, -1.573444, -0.050925, 1.413208, -2.972257, -2.596563, -1.668879, 1.804994, -1.123652, -0.759027, 0.555984, -0.408789, 1.027710, -2.098943, 1.386197, -2.098082, -0.305885, 0.517045, 1.235609, -0.239477, -1.754854, -2.860306, 3.810205, -4.061375, 6.525562, -1.502322, 0.979582, -2.672280, -0.722794, -0.761116, -0.911209, -0.857385, -1.241742, 1.673461, -0.335826, -2.154589, -0.201959, -1.013601, 1.139557, 3.868553, -2.091437, -0.092244, -1.082958, 0.550138, -0.458324, -0.412855, 1.429083, -0.965382, -0.067656, -1.103344, -0.841873, -1.369830, -0.513873, -1.954285, -0.804106, 1.982196, -1.215416, 2.908003, -2.176613, 2.629941, -0.770963, -0.225980, -0.079823, -0.029128, 1.944940, -1.664493, 1.579839, -0.645574, 1.956723, 2.056866, 1.170755, 0.282791, -2.806349, -0.015250, -0.545786, 1.350799, 4.326220, -0.232130, -1.864140, -2.026012, -1.895816, 0.236812, 0.566187, 0.922909, -3.498955, -1.690579, -0.976886, -2.819652, -0.867696, -2.262519, 0.221791, -1.079175, -0.067272, 0.605917, -0.939698, -0.604690, -0.263042, -0.898258, 0.427297, 0.077779, -2.724524, -1.529292, -0.498150, 1.099533, 0.170517, 0.831057, -2.982569, 1.253065, -2.751751, 2.265109, 1.696212, 1.009590, -1.266789, 0.647445, -1.694958, -0.019201, -0.734654, -0.022089, -1.503616, -0.469148, -0.154751, 1.000041, 1.188490, 0.510484, 0.473362, -3.674450, 0.073823, -0.515428, -0.987592, 0.794154, -0.582627, -1.815529, 1.149576, 0.177125, -0.941142, -0.892184, -0.065409, -0.450944, 0.527143, 2.578118, -2.089118, 1.487622, -0.322834, 
0.694984, -3.292696, -1.095167, 0.084412, -0.336131, 1.718490, 1.054931, 0.402755, 0.110611, 0.581030, 0.870269, -0.076126, -0.403352, -0.003785, 0.589630, 2.314747, -1.574928, -0.096184, 2.244779, -1.489247, -1.319940, 3.177969, -2.140591, 0.425897, 1.085643, 1.190120, -3.366844, 1.705391, -0.522894, 0.263463, 2.918571, -0.561851, -0.452033, 1.765589, -0.628013, 0.580821, 0.500394, 1.468545, -1.244172, 1.824252, -0.539581, 2.399555, 1.622275, 0.161135, -1.577280, -0.976519, -0.920564, -1.119612, 0.282427, -2.540531, -1.077090, -1.163355, 2.653575, -0.378440, 0.068285, -3.100437, 0.973931, 2.660982, -0.405911, -0.221837, 0.831700, -1.742345, 1.977760, -0.828174, -0.731623, 1.165439, 0.582277, 2.558103, 2.128608, -1.686338, -0.048350, 3.158402, 0.870736, -0.881156, 5.433671, 0.142794, 0.158623, 0.606935, -2.015232, 2.057729, -1.138699, 0.622851, 0.681134, -5.330796, 0.991994, 0.044579, 2.001621, 0.730195, 0.956073, -0.110057, 0.502674, 0.217331, 0.120163, 0.781175, 1.368709, -2.420993, -0.130430, 0.028031, 1.529119, 1.404342, -0.774836, 0.976970, -0.148665, -0.810735, -0.887423, 2.013502, -0.897528, 1.571385, -0.008383, 0.587428, 1.256854, -1.503410, 0.921428, 0.784695, 0.600209, -1.875112, 0.421278, -0.644875, 0.660149, 0.551328, 2.647580, -2.805653, 0.070194, -0.181113, 0.577706, 2.920263, -2.099920, -1.701728, 1.026577, -0.970308, -1.412308, -0.846952, 0.894402, 1.019806, 2.684441, -0.601777, 3.547955, -0.055353, -0.914555, -1.066879, -0.039697, -2.241604, 0.908233, -1.655504, 0.784147, 1.764261, 1.648813, 0.832601, 2.696937, -3.205015, -1.966464, -1.120578, 0.565785, -0.143854, 2.089412, 1.442338, -0.479699, -0.790053, 0.074845, 1.469679, 0.232747, -2.076254, -0.281494, 0.827660, -0.684334, -1.107358, -0.133235, 1.017254, -0.384948, 1.280981, -0.930045, -0.921105, -1.558795, 0.127375, -1.567876, 0.532821, 1.409253, -1.080656, 2.452451, -2.294327, -1.574199, -1.484760, -0.220872, 2.730867, 0.811399, -0.238392, -0.848654, -0.197067, -2.837629, -0.801398, 0.288724, 2.595055, 0.498444, 0.218231, -0.518959, 1.092926, -0.662425, 1.339937, -0.907665, -0.197190, -0.613547, -1.629007, -0.238709, 0.357297, 3.501794, -1.675147, 1.486766, 0.735359, -0.442967, -0.105438, 2.399523, -2.097277, 1.310113, 2.508687, 1.327647, 2.497625, 2.764615, 0.153949, -2.596467, 1.362896, 0.454628, 1.369634, -0.951836, 1.850208, 0.564333, 1.808473, -2.133248, -2.364481, -1.498736, -1.338814, -0.211370, 1.439009, 0.346441, 1.246948, -2.544048, 0.537346, 0.455500, 1.187719, -2.117737, 1.728455, -0.451546, -0.895454, -0.198955, -2.570833, -0.231905, -0.201090, -2.200789, 1.508802, -0.005641, 0.888191, 0.773606, 0.296514, 2.750696, 0.998073, -2.350559, 0.385073, -0.722224, 3.232554, -0.555466, 2.469613, -0.621966, 0.343913, -1.743786, 2.984197, -0.114192, 1.330646, 0.330090, 1.004636, -0.388806, 0.534908, -1.113315, -0.655712, 0.044088, -0.376111, -1.495334, 1.396304, 0.641600, 3.271334, -3.178414, -0.669201, -2.582852, 1.645419, 0.540461, 0.331414, 1.225434, -0.241126, -0.785214, -1.386897, 2.089353, 1.537059, -2.968950, -2.054803, 1.550492, 0.073438, -6.223676, 0.510720, 1.496452, -1.351471, -1.195973, 2.776203, 0.679208, -0.240022, -0.835032, -2.313000, 0.119870, -0.593088, 0.348416, 2.193747, 1.520391, 2.199650, -0.957386, -1.181147, -0.167809, 0.753372, -0.953912, 0.265204, 0.064691, 0.636637, 0.821186, 1.538177, -3.686851, -0.144751, 1.793468, 1.569461, -0.292569, -0.914937, -2.088454, 1.005811, -0.910524, 0.439879, 1.328168, -0.349991, -5.888283, 0.596823, -1.514071, 1.365590, 2.133192, 3.649516, 1.322269, 
1.227857, 0.811110, 0.388887, -0.209151, -0.919356, -0.269549, 0.332280, 0.980510, 0.128268, 0.441788, -1.434218, 0.779928, 1.991361, -0.801857, -0.177272, 0.155561, 1.864503, -0.188135, 0.443639, -0.201392, -3.339523, 0.004105, 1.716572, -0.807478, 2.900543, 1.237759, 1.324246, 0.431000, -1.628002, 0.775048, -3.182475, -1.265094, 0.635110, -3.442352, 1.492518, 2.690553, 0.904908, -0.579639, 0.497659, -0.484186, -1.314447, -1.616720, -1.006984, 1.163017, -2.161178, 0.830324, -3.379968, -1.966440, -0.237778, 0.294197, -0.255212, -1.485385, 1.590396, -0.970986, 0.815678, -0.565780, 0.765599, -0.579318, -4.809091, 1.025935, 0.239194, -1.943313, -1.061139, 1.005775, 1.067468, -2.438157, 0.836673, 3.396852, -0.050520, 0.135852, 0.473968, -0.147873, 0.170645, -2.914069, -1.719983, -0.259775, 1.541576, -1.214400, 2.774303, -0.243318, 0.546123, 1.026875, 0.803985, -1.588696, 1.601805, 0.223480, 0.261121, 2.564875, 0.269717, -1.855639, -1.292171, -2.966914, 1.832010, 0.865697, -1.470347, -2.289701, -1.025450, 1.778352, 0.709166, -0.841818, -1.011234, -1.655023, -1.304637, -2.845899, 2.109060, 0.490694, -0.554004, -0.603971, 2.273292, 2.756260, 0.654454, 1.722688, -0.533683, -1.680390, 1.645834, 3.247320, 3.622413, -0.011748, -0.446426, -1.015927, 1.978297, 0.093600, 0.309903, -1.602069, 1.275189, -2.761825, 0.222209, -0.531991, -1.257954, -0.675513, -1.324622, -1.133766, -1.195737, -1.083412, 0.394430, 0.766754, -1.259084, -1.275333, -1.115739, -2.586273, 0.477633, 0.823651, -0.393242, -0.378602, 3.018721, 0.094792, 1.576330, -0.092349, 1.915663, 1.527587, 2.498819, 2.451694, -1.046947, -0.252382, -1.892246, -1.338359, 3.089607, 1.443579, -1.052285, -0.664112, -1.388935, 2.425827, 1.507866, -4.611504, 1.285874, -0.190258, 1.272828, 0.272262, 0.953322, -9.179953, 1.193864, 0.965136, -1.622129, 0.291771, -0.943462, 1.972639, -2.435059, -2.278618, 1.818903, 0.539652, -0.396332, 0.881990, 0.799462, 0.343961, -1.754548, -1.114249, -1.897099, -0.147805, -2.150880, -0.000791, -1.300718, -3.832193, -0.780065, -1.945696, -0.030858, 0.000042, 0.805455, 2.447916, 1.722992, 0.645666, -2.109492, 2.115638, -0.315772, -0.850652, -0.936574, 0.623982, 3.587168, 0.475998, 1.707327, -0.216391, 0.332070, -4.023162, 1.608437, -2.313433, 0.220340, 2.958182, 1.409508, 0.431645, 2.044839, 1.702535, -0.292866, 0.813356, 0.400711, -0.594963, 1.127115, 0.228125, -1.051682, -1.838791, 1.764836, 2.409988, -2.069432, 2.133219, 0.451490, -0.315999, -0.431537, 0.789534, 0.334011, -0.943850, -2.796613, 0.327788, -0.906255, 1.931957, 0.522279, -0.413914, 1.412995, 0.679381, -0.042195, -2.019298, 0.345345, 1.177390, -1.735565, 2.314764, 0.146042, -3.456690, -2.104620, 2.330595, 0.024386, 0.955444, 0.700547, 1.084794, -0.012142, 1.254361, -0.973162, -1.171974, 1.185098, -1.160176, -0.593567, 1.845748, -0.596058, 2.846261, -0.244734, -1.378718, -0.462473, -2.080487, 0.281841, 1.384994, -0.871585, 0.513520, 2.387744, 0.941064, -1.231176, -0.448849, -0.052633, 0.448985, 0.817220, 0.193013, 1.681891, -0.783407, -2.896904, 2.945495, 2.379376, 3.305279, -0.913598, -1.055743, 1.049574, -1.227361, 0.528382, 0.199938, 0.744414, -0.433807, -2.558005, -2.157215, 0.963974, -1.745908, -1.255904, 1.547944, -1.196803, 0.286619, 0.908697, -2.191496, 0.680540, -0.226127, 1.272491, -0.747410, 1.405450, -1.639263, -1.129315, 2.574672, 3.253188, -1.135827, -1.554247, 6.512227, 1.753033, 0.823991, -1.835580, -0.983194, -0.124638, -0.022031, 0.961799, 0.278102, 0.214496, -0.402409, -0.732952, 0.021326, -0.049979, 3.023667, 0.962861, 0.732301, 0.545412, 
-4.248863, 0.301756, -1.325999, -0.260541, -0.856889, -2.518301, 0.555524, -0.147469, -2.487039, 3.068854, -1.992702, -1.467270, -1.067934, 0.004117, -0.854934, 1.883919, -0.786242, 1.432566, 2.126631, 0.402890, 1.339862, -0.364170, -0.067614, 0.919690, -0.946988, 0.548263, -1.141940, -0.144215, -1.553461, -1.630635, -2.944906, 1.639079, 0.876025, -0.813501, 2.072524, -2.988084, 1.245615, 1.588284, 2.049222, 3.446004, 1.397513, -0.983097, 0.600800, 1.770925, -0.023363, -2.459765, -0.360136, -3.184824, 0.099425, -0.029557, -2.068456, -2.332472, -0.842436, 1.643009, 0.654029, 0.474408, 0.444330, -0.097360, -1.171367, -1.889962, 0.326767, 0.592943, -1.331804, 0.875488, -1.152579, 1.449305, -1.075288, 0.005322, 0.690454, 1.979462, 1.497575, -1.565452, -0.646482, -1.231740, -0.468063, 0.026075, 1.132621, -0.163886, -4.350859, -0.162999, -1.589626, 0.918930, 2.337845, -3.165326, 1.935462, -0.262769, -0.543573, -1.576134, 1.592178, -0.567114, -0.999020, -1.033293, -0.094483, 1.341990, 3.398355, 1.109368, -1.159549, -1.815267, -1.440461, -0.481400, 0.583820, 1.289227, -0.128962, -0.791260, 1.091561, -1.030977, 2.240554, 1.209297, -1.108794, -1.177909, -1.032935, -1.174677, -0.174780, 1.738330, -0.852252, 0.423968, 0.159460, -0.752635, 3.254423, -2.653491, 2.442864, 2.021538, 1.303416, -1.141996, 0.336794, -0.213280, 1.275212, -2.173024, 1.875058, -0.671852, -0.158650, -0.674296, -0.202623, 1.995634, -2.191850, 1.883953, -0.336998, -1.238019, 0.541431, 3.305144, 0.508988, 0.442913, -0.243398, -0.671488, 2.152217, 0.667315, -0.198660, 0.038538, 1.180478, -0.690591, -1.226552, -1.727843, -0.004452, 0.568615, 0.672716, -3.409685, -1.916769, -5.679497, -1.047065, 2.928753, 0.270798, -2.017706, 0.235939, 0.796101, 0.556786, 0.606815, 2.368180, 1.899704, -2.680903, -0.161020, -2.383109, 3.612562, -1.170289, 2.787203, -0.194454, 0.293056, -1.171062, 1.410911, 0.924794, 1.064608, 0.280578, 0.916745, 1.811764, 3.447900, 0.579738, 0.847980, -0.238627, 1.592027, -1.497012, -0.859788, 0.591069, -0.158362, -1.913070, -0.523074, 0.404998, 1.078486, -0.227280, 3.061089, -2.930329, -3.051719, 0.243530, 1.025823, 0.923059, -0.768734, -0.191051, 0.014987, -1.984347, 0.293401, -2.607561, 1.919448, 1.775906, -0.505613, -0.935636, -2.807077, -1.265656, -0.166470, 1.108056, -3.304356, -0.859128, 0.256328, -1.324335, -0.095796, -1.686727, 0.520889, -1.217061, 3.472981, -0.801870, -0.179432, -1.440508, 2.409567, 3.577608, -1.589138, 1.651056, -0.551437, 1.384983, -0.337885, 1.555722, 2.393574, 3.072235, -0.135722, -0.258120, -0.878903, -1.838472, -0.335688, -1.138918, 2.891968, 0.375865, 1.447449, -1.276451, -0.781883, 2.368396, -2.391079, 1.875496, 0.306684, 2.814644, -1.086365, -2.282871, 1.473142, -2.381812, -0.086461, -1.239312, 1.242444, 0.206965, 0.585810, 0.195624, -1.651873, 2.583272, -0.446626, -0.496205, -1.813773, 1.460548, 0.687077, 0.128585, 1.167922, 1.050403, 3.267393, -0.148534, 0.143266, -0.469868, -3.007697, -1.975590, 0.431214, -1.631661, 1.333369, -1.787341, -1.832982, -0.767213, 0.469098, -0.397199, 0.525026, 0.400948, -3.660139, -1.129983, 0.347222, -0.579625, 1.167796, -0.135037, 2.250765, 0.716468, -0.021767, -2.187217, 3.340802, 0.250752, -0.200596, 1.184929, -5.449180, -0.588696, 0.933857, -4.186715, -1.178003, 2.194058, 0.318955, 1.976931, 0.966189, 0.770698, -2.781453, -2.597327, -0.397811, 1.257845, 2.107651, -2.781290, -2.204887, 0.622558, 2.261195, -1.756650, 2.628099, 1.156767, -1.292722, -2.604963, -0.427024, -1.993675, 1.186585, 1.375645, 0.584989, 0.813572, 0.484206, 1.400198, 1.011191, 
-0.457454, 0.431162, 1.074501, -4.307183, -0.939692, -1.439165, 1.343590, 2.248487, -1.706700, 0.860494, -1.267403, 0.991206, 0.790562, 3.558558, 1.035073, -0.183890, -2.465762, 2.085005, -0.439460, -1.367476, -1.726921, 2.538736, 2.148680, -1.555413, -2.933472, 2.298046, 0.778528, 0.004253, 0.384853, 1.302090, 1.833395, 1.504384, 2.349369, 0.498941, 0.377971, 0.903127, -1.714386, 1.808365, 0.401536, -1.061390, 1.349846, -1.273103, -0.958734, -4.003449, 1.556067, -0.588390, -0.763635, 1.670594, -0.111270, -1.567189, 0.124333, -2.372967, 2.483768, 2.342311, -0.635273, 2.274959, -1.561237, -0.795889, 3.526085, 0.525109, -3.311819, -1.773293, -2.193236, 2.924568, 0.621321, 1.345457, 0.065248, -0.632541, 0.017810, 1.961737, -0.078699, 1.047366, 1.972132, 1.459933, 1.816787, 0.169214, 2.447540, -1.006916, 1.575598, 2.228669, 1.292875, 0.412555, 1.329749, -0.697624, -0.298773, 2.185552, -1.368109, 1.348547, 1.603870, -1.370027, -0.277771, 1.377412, 0.442057, -1.199049, 1.792844, -0.991395, 0.269774, -1.861074, -1.239275, 1.189940, 0.530348, -1.113256, 1.715649, 2.073871, -0.560494, -0.075858, -0.088142, -2.563752, -0.006378, -2.288894, 0.969853, -2.207385, 1.185278, -0.296590, 3.099877, -0.166058, -4.040632, -0.869314, -0.657492, 0.128895, 0.859182, -1.401345, 0.116238, 0.440015, -1.148709, 1.511427, -2.100009, 0.440736, -2.851284, -1.550734, 1.587870, -1.926517, -1.622249, 0.362452, 0.393982, -0.589483, -0.028004, -0.689772, 0.956467, 0.254517, 1.234098, -0.569437, -1.736388, 1.245260, 2.599948, 0.029409, 0.954440, 0.952267, -0.061380, 1.054168, 3.325345, 0.831169, 0.006168, -2.285375, -1.643000, 2.221149, 2.165285, 0.602395, -1.775944, -1.016710, 1.076364, -0.484446, -0.370944, -0.481427, 1.652357, 0.379384, 2.971716, -0.467889, 0.796451, 1.967816, 0.599432, 0.234169, 0.459560, 0.644779, 1.913226, -1.753372, 0.740245, 0.276683, -3.648571, -1.848193, -0.744854, 1.631157, 4.498623, 1.637381, 1.898848, -0.083583, -0.153889, -1.734444, 1.382062, -2.935525, 8.032228, 1.329467, -0.673482, -1.722037, 0.504168, -1.629518, 0.466105, 1.766385, 2.825903, -1.433709, -2.023616, 1.084171, 0.101056, -0.245807, 1.811927, -2.314635, 0.898469, -0.996189, 2.491988, 1.728794, -1.631869, 1.359730, 0.318513, -2.062926, -3.121436, 0.557902, -2.539874, -1.457490, 0.280348, -1.866082, -2.761239, -0.655958, 0.704248, 2.974621, -2.696070, 0.992441, -0.275967, -0.623660, 1.204403, 0.438535, -2.725973, 0.280349, 0.460025, -2.209344, -2.415729, -1.851040, 3.049643, 0.707512, 0.050940, 2.559838, -0.988853, 0.171449, -0.343654, -1.355067, 0.663537, 0.685152, 0.458565, -2.712296, 2.219773, -2.099763, -0.631212, -1.383506, 1.134895, 1.109969, 1.936868, 1.218190, -2.372160, -2.198699, -1.117006, -0.035712, -1.748089, 0.579812, 1.111341, 1.359734, 1.213473, 1.989610, 0.369072, 0.192755, -1.581744, 1.194623, -2.739968, -1.546148, 3.911412, -0.394406, -0.627090, -0.957422, -0.355542, -0.909030, -2.104877, 1.044099, 0.480431, -0.838496, 1.587747, 0.798822, 0.652407, 2.037358, -0.056508, -1.414758, -0.086790, -1.121325, 1.122403, 1.669306, 2.035714, -0.269159, -3.826930, -0.235112, -1.695780, -0.398885, -0.197627, -0.910344, 1.833906, -1.359157, 0.417122, 0.397929, 3.197263, -1.990909, -10.345819, -0.796384, 0.827685, -3.047179, -2.074122, -1.859646, -2.827106, 1.286799, -3.407764, -0.257633, -1.792917, -1.726816, 0.072984, 1.012066, -0.526178, -0.272112, 1.060727, -0.330206, -0.610387, 1.074508, -0.536489, 0.489240, -0.099734, 0.628324, -2.983965, 0.941947, 2.500448, 1.332715, 0.284890, -1.083395, 0.012050, 0.028791, 1.571663, 
0.851643, -0.147082, -0.702242, 0.288649, 4.333088, -2.094810, -1.181755, 0.154108, 2.446234, -0.844859, 0.775269, 0.940873, -2.585838, -0.907137, 1.847467, 2.794644, 0.467335, -5.823781, -0.234167, -2.143270, -3.499144, -0.217149, 1.157717, 1.193232, -1.152372, 1.913921, 0.686949, -1.665765, -0.626728, -3.686383, 0.242325, 2.319199, -0.805866, -0.814822, -2.567696, -0.763215, 1.596967, 0.506704, -0.221747, 1.634145, -0.042335, -6.479886, -0.780314, 2.458230, 1.516087, 0.357523, -0.200469, 1.723241, 1.960635, -1.998756, -0.632013, 0.338134, -0.954243, 2.203972, 1.709370, -1.285704, 0.509672, -1.389535, 0.565242, 0.630339, 2.338622, -1.926264, -0.267491, -0.774171, 0.423991, -0.432595, -1.668700, -1.206680, 1.769401, 0.682602, 0.387151, -1.169134, -1.961097, 0.521347, 1.030647, -0.850463, -1.670901, 0.320442, 0.870508, 0.266324, -2.534516, 2.353228, -2.028431, -1.092895, 0.823078, -3.300803, -0.905493, -0.498755, -0.083098, 0.134747, 1.126226, -0.206224, -0.465925, 1.676973, 0.226049, -0.679422, 1.636569, -1.207536, -2.509265, 1.030790, 0.094372, -3.540144, -0.260700, -0.251626, 0.870629, -0.965907, 0.189883, -1.507507, -3.256093, -1.355452, -0.515230, 0.603112, -1.958845, -1.242684, -3.519969, -0.119781, -1.800199, 0.362269, 1.003944, -0.733221, -3.117880, -1.978782, -2.750227, 0.628010, 0.834023, 1.378811, 0.721543, 1.640973, -0.208344, -0.871235, 2.310358, -2.721220, -0.248023, 3.266030, -2.627601, 2.405243, 0.034087, 0.616138, -1.485607, -3.443488, 0.122094, -0.339027, 2.636119, -0.520162, -0.845279, -0.433359, 4.289217, -1.461683, -2.912294, 1.157770, -0.072004, -1.430300, -0.518065, 0.848698, 1.195008, 1.465089, 1.246130, -3.653644, 1.968034, -0.055597, 0.686380, 0.139687, 3.673851, 0.591359, -0.096465, 0.772135, -1.569232, -0.224120, 1.321505, 1.987114, -0.279955, -0.609099, 0.100604, 1.882433, 1.155056, 0.589389, 2.115252, 1.120671, 0.971936, -3.108589, 1.296618, -0.871856, 0.220171, 0.492959, 0.178675, 2.086147, 1.167586, 0.006426, -1.481592, -0.752377, 1.132332, -2.913472, -0.529229, -1.036058, 0.311685, -1.022353, -0.715158, -0.308950, -1.048710, 0.525816, 2.266930, 0.901488, 0.781013, -0.861028, 0.214446, 1.634781, -2.360174, 0.209138, -0.023756, 0.834960, -1.243474, 1.813794, 0.411667, -0.503325, 1.657814, -0.658775, -0.429320, 0.879727, -0.344314, 0.517012, -0.041244, 1.911083, -1.361798, -2.262503, -0.125114, -1.859583, 0.162502, -0.145491, -1.907688, -1.083771, 1.522410, -1.053662, -2.348332, -1.029892, -1.759236, 1.863647, 1.120211, 0.716685, -0.467506, 1.255322, -1.252031, 0.646594, -0.172564, -1.936044, -2.740171, 1.214675, 1.419253, -2.031842, -0.598199, -0.056713, 0.328540, -0.144430, 0.681323, 1.078133, -0.266249, 0.647757, -0.277778, 2.481803, 0.399793, 1.677046, -3.165335, 1.663298, 0.981927, -2.690334, -2.536687, -0.046288, -1.993745, -2.154355, -2.084886, -1.682671, -0.510585, -1.151454, -1.231588, -0.730014, -2.252337, -0.175934, 1.438152, 0.703790, 0.091381, -0.226761, -1.542531, 0.349996, -0.507521, 0.876646, -1.783002, 2.163898, 1.904030, -1.513281, -0.666223, -0.145403, -0.304008, 0.472697, -0.251674, -0.699141, 1.437354, 0.894560, 1.408713, -0.367863, -3.061508, 1.625146, -1.152143, -0.360476, -1.491743, 1.101422, 0.997161, -1.990891, 0.091491, 1.926685, 0.341844, 0.450564, -1.216493, -2.320588, -2.028212, 0.037600, -0.969572, -1.662101, -1.113374, 0.952514, 0.703434, 2.289342, 1.759977, 0.639049, 1.110425, -2.811371, 1.425214, 0.513256, 1.155478, 0.563762, -1.794843, 3.703616, 1.596296, -1.570067, 1.446437, -1.243573, -0.149773, 0.239586, 0.629837, 
-0.757124, 1.040002, -0.702845, 1.432802, -0.919857, -0.847647, 0.776140, 0.350346, -0.424403, 2.288020, -0.764005, 0.927623, 0.732643, -0.722699, -2.137658, 1.106374, -0.961683, -0.022159, -1.673372, 2.340934, -1.913663, -0.286280, 0.157772, -0.299253, 0.324698, 0.935570, -0.192480, -3.157064, -2.161556, -0.104483, 0.436096, 1.686085, 0.661099, 0.259927, -0.083087, -0.331702, -1.241498, -1.299620, -0.169502, -2.640443, -1.188181, 1.350742, -1.958650, -0.967032, 0.709098, 0.532227, 3.951190, 1.688368, 0.605110, -0.735107, 0.690662, 1.352871, 2.514204, 4.137648, 1.381989, -0.879419, 1.455069, 0.820441, -1.080353, 1.618252, -0.589761, -0.819875, -0.780784, 0.071111, -0.862547, -1.600801, -0.283200, 1.267667, -0.058537, -3.001881, 1.718999, 0.039207, -0.471396, -1.858776, 2.247664, -0.282785, 0.356052, 1.806696, -2.924471, -0.630809, -1.387571, 1.156041, -0.707393, -0.421613, -1.777457, -0.372323, -1.390987, -0.298395, 0.491878, -1.456836, -0.646174, -0.912228, -1.040814, 1.018844, -3.066530, -1.699523, -1.227899, -2.540288, 2.112853, 0.551650, 1.739899, 0.508044, -1.674967, 0.007424, 0.273819, -1.379015, -2.258660, 2.093238, -0.269717, 2.135967, 0.554894, -4.862602, 0.403414, 1.784584, -0.607983, -0.754711, -2.560575, 1.873582, 0.519547, 0.662227, 1.440363, -2.762729, -0.950051, 0.833175, 1.800522, -1.254883, 3.032549, 0.313758, 1.150326, -0.947803, 0.188012, -5.928670, 0.860420, -1.445431, 2.212289, -0.620558, -0.848972, 0.998138, -0.842959, -1.301280, -0.186356, 0.721201, -0.689276, -1.068574, 1.181227, 0.725315, -0.492998, 1.629001, -1.054909, 2.844236, -0.348600, -0.540042, 2.060351, -1.458005, 1.092593, -1.638849, -0.366834, -1.199931, 2.062175, 1.491682, -0.677772, -0.804922, -1.459533, 0.312466, 1.470264, 1.401966, 0.165094, -1.124756, -0.272886, 0.294967, -3.168278, 0.447026, -1.721222, 1.331578, -0.296635, -0.493381, -2.273772, 1.680882, -0.251190, 1.727859, -0.125792, -0.138742, -2.155842, 0.303363, 1.732968, 1.289022, 0.059376, 1.425003, 0.591880, -0.409793, -2.310584, 0.329457, -1.593862, 0.156367, -3.533641, -0.738178, -0.600709, -1.993809, -1.257213, -1.873214, 0.118924, 1.594824, 0.734960, -0.335038, 0.911282, -0.486446, 1.029378, 0.792031, 1.580075, 1.268993, 0.541517, -0.537808, 0.253363, 1.640724, -0.829691, 1.295986, 0.592495, 0.474230, -1.604008, -3.370971, 2.434560, -0.244377, 0.736183, 1.336372, 0.282443, -1.538506, 0.877491, 2.405556, 2.700866, 0.161710, -0.831850, 1.367040, -0.964631, 0.162463, 0.797982, -0.896789, -1.564666, 2.302680, 2.206859, -0.236026, -2.542495, -1.297982, -1.266405, 2.058210, 1.408397, -0.732165, 0.866827, -0.404816, 0.205700, 1.771121, 0.329369, -0.527325, -3.710169, 0.928608, 1.971085, -0.629562, -0.142901, -1.235813, 0.033189, -1.654015, -1.717465, 1.444321, -0.970576, 1.333245, 0.316356, -0.559113, -1.536770, -1.221961, -0.236194, -2.713904, 1.561908, 0.549531, 2.427177, 0.743518, -1.251259, 1.159088, 1.531313, -2.054123, -0.121870, -0.735877, 0.613076, 0.061634, 0.791451, -0.418798, -0.353790, -2.750365, -0.335011, -0.806334, 2.259530, 1.895240, -1.027326, 0.580512, -2.093052, 4.493968, -3.208963, 0.151161, -0.620175, -0.954287, -1.633909, -0.543642, -0.937648, -3.595685, -2.022834, -0.843202, -0.818238, 1.229040, -0.084374, -1.927831, 2.984450, 1.392313, 0.512093, -0.250420, -1.257843, 1.695182, 0.376897, 1.391729, -1.856572, -0.275027, -0.270027, -2.853746, 0.717726, -2.140739, 0.550229, -1.774874, 1.074416, 0.352224, 0.223556, -0.254174, 0.730164, 0.673305, 0.502584, -0.126746, 3.259682, 1.314091, -0.251725, 2.246045, 1.809486, 
-1.109016, -1.265288, -0.020519, 1.468189, -0.056720, -1.800602, -1.475459, -2.710105, 1.906110, 0.197875, 1.211238, -1.030333, 2.568016, 4.726199, 1.455643, -1.406735, 0.125682, -2.438629, 0.663524, -2.310382, 0.313568, 0.844070, -0.749174, -1.880922, 1.743759, 0.528783, -1.526943, -3.150258, 0.424008, -2.298711, 0.589580, 1.325055, 0.218228, -2.742978, -0.011931, 0.172254, -0.059231, -3.013192, 2.519255, -0.317165, -0.429024, 1.866894, -0.411766, 0.096046, -0.343778, 1.481043, 1.154165, -0.866295, 1.419388, 2.962029, -3.715078, 0.980836, 1.903221, 0.349094, -0.565781, -0.302156, -0.928041, 0.006622, -0.295362, 1.123859, -0.271984, 0.857013, -1.092173, 0.748905, 0.973096, -3.126032, 1.077309, 0.597780, -0.179388, 0.118135, -0.435285, 1.807033, 2.569278, -1.103961, -1.658203, -1.114181, -3.346684, -1.036287, 1.735447, -0.008384, -1.309239, -0.149531, -0.720216, 0.165458, 2.016681, 0.902350, -0.987751, 1.810139, -2.990412, 1.655684, 0.073364, -0.018267, -15.890377, 1.605731, -0.200125, 1.847888, 0.107478, 0.299769, -1.568345, 1.292254, -4.000039, -0.130882, 1.948626, -0.187894, 0.779199, -1.546185, 1.299067, -1.526557, 2.252347, -0.622256, -0.335947, 1.197534, 0.948126, -1.156192, 0.112613, -0.648274, 1.190632, -0.332018, 1.824138, 0.295704, 0.086898, 0.212632, -1.849817, -0.494434, -0.685018, 0.587919, 1.077813, -0.110394, -0.048261, 0.329976, -1.213555, -0.728337, 2.018705, 0.968140, 1.196094, -1.172107, 1.532893, 1.033232, 1.173709, -2.093134, 0.631497, -1.686658, -0.125552, 1.447971, -1.023348, 0.033178, 1.684072, -3.614491, 2.426406, 0.881374, -2.132526, 0.218011, 0.876277, -0.079876, -1.178015, 1.049176, 0.994832, -0.411852, -0.638060, 0.785967, 0.293327, 2.419301, -0.859055, 0.185743, 0.196676, -3.193673, -1.412443, -1.207662, 0.240055, 0.838873, -1.220432, -2.517108, 0.517194, 2.698374, -0.636612, -1.815033, -0.867607, 0.417273, 1.320199, -0.605956, -1.022787, -0.140377, -0.299150, 0.526502, 2.765671, 0.892078, -0.705774, 1.326158, -0.860298, 0.809407, -0.115889, 0.414584, 1.086166, -0.568674, -1.643514, 1.066131, 0.607676, -0.679681, -1.080888, -1.422895, 0.086499, -0.032735, 0.706417, -1.029094, 0.681382, 1.216894, -1.109293, -2.394442, -1.575637],
+	"gemma:latest":         [-7.841464, 0.767280, -3.694498, -0.001570, 5.630982, 0.547684, 0.593437, 1.212109, -4.688223, 2.092025, -0.195202, -3.266952, -8.518199, 0.070584, 8.807918, 5.111681, 1.993832, 2.246897, 3.247146, 1.761566, -2.598649, -4.455876, -0.426431, -1.164721, -4.319394, -2.370344, -2.636060, -9.681966, 0.902103, -3.334890, 5.026929, 2.570854, 3.294873, -1.646322, 0.686373, -2.270081, 0.412342, -0.825905, 5.388302, 2.884646, 3.659441, 0.431944, -1.809620, 2.615254, -2.043983, -2.777713, 2.215231, -2.675906, -6.405007, -0.564862, 5.054352, 1.449780, -1.436352, 4.117354, -6.379251, 5.407580, -0.150039, 0.914680, 0.118216, 0.678776, -0.758149, -1.420658, -2.271526, -3.702099, 4.400447, -2.201090, -3.284660, -4.731477, -2.380802, 6.390813, 1.701460, -0.104133, 3.314382, -5.486407, -0.497424, -0.284862, 3.509625, -5.719083, 0.680101, -0.510482, 23.860937, 0.594281, -2.543869, -3.641017, 2.601214, -5.317530, -2.042041, 9.326493, -3.053976, 3.848793, -1.276841, 3.612005, -3.855268, -19.251253, -1.177541, -2.570348, 4.785016, -2.585326, 2.770618, -3.863601, 1.766158, -2.165138, -2.194496, -4.630305, -4.860158, 1.823642, -4.802298, -5.029325, -0.455342, 0.265742, -2.935017, -1.257262, -2.160555, -3.854411, 1.721239, -4.565334, 4.482922, 5.269161, 7.393859, 2.433737, -3.377688, -3.388730, 4.868166, -0.695461, 0.054102, 8.874551, 6.244246, 3.253466, -1.038608, 0.156654, 1.452707, 0.270274, 3.716397, -0.122048, -0.453325, 0.174775, 1.977967, -0.052099, 2.632564, -0.436857, 2.643586, -0.714668, -2.953247, -1.018465, 2.830314, 1.584697, 5.180400, 24.356844, 1.066748, 2.112376, -1.408623, 0.145132, 6.618137, 0.286551, 2.925483, -0.982488, 0.942114, -4.367652, -3.424205, -2.764689, 1.796536, -1.796484, -5.243772, -5.039718, 2.987508, -5.299181, -3.224602, 0.706269, -1.158171, 1.424140, -0.836250, 12.712502, 3.236430, -1.768288, -1.191801, -1.979457, -3.468471, 2.570443, -6.846710, 3.446858, -5.207852, -1.363930, 8.023638, -0.245742, 7.686746, 1.518655, -0.701010, 5.023693, -4.301249, 1.426088, 0.490979, -4.402451, 2.674457, -0.641645, 2.394136, 3.862517, 0.654314, -2.431630, 1.330356, 1.102203, -4.609156, 6.544267, 0.908221, 4.097334, 1.930546, 0.878689, -3.173191, 3.115578, 0.266788, 1.188245, 0.096632, 0.762899, 4.334242, 2.847873, -3.059559, -0.996375, 6.134145, 3.063574, 0.923209, -0.568771, 2.497131, 4.024843, -0.112233, -1.995722, 0.658854, -4.985031, 13.744526, -3.286121, -4.657845, 1.674132, -0.948745, -0.680255, -2.020968, 1.239926, -2.372422, -2.296769, 1.042987, -2.033823, -4.339974, -2.441006, 10.738417, 2.673922, -0.043110, 1.825774, -6.405427, -2.097725, -6.020026, 3.646997, -5.520285, 2.700456, -6.084919, 2.107202, 25.698597, 0.423951, 1.604773, -3.311516, -0.918613, -1.273135, 0.130150, 1.867079, 1.428254, -7.172602, 0.594419, -5.660052, 0.378981, -7.878116, 2.772577, -0.467461, 4.479719, 2.089232, -3.973105, -3.616508, 4.947463, -1.895533, 0.616217, 0.992185, 1.378321, -4.616071, -6.274027, 3.722451, 6.198734, -2.148652, 6.459441, -2.609032, -1.101084, -1.849451, 0.045313, -0.134042, -0.758635, -4.064408, 4.666423, -0.272462, -3.961690, -2.621774, 3.876971, 1.087932, 1.772216, 2.323947, 3.050706, -0.472571, 2.760212, -3.276031, -1.541200, 0.861113, -1.634208, 1.556657, -5.462245, 4.749938, -0.511214, 0.835638, -4.087888, -3.569330, 1.798860, 0.072654, -2.034816, 2.618757, -0.285620, -5.038560, 1.369521, 2.782526, 2.107290, 0.403349, 0.029528, 0.551149, 1.817455, -0.422488, -0.939724, 3.047044, 5.337030, -0.610855, -2.417710, -0.778550, 1.426874, 0.531995, -4.521540, 
0.579978, -1.685038, -1.053357, -0.430984, -2.011636, -2.547843, 1.882915, 1.394932, -3.340450, -7.000714, 6.059894, 4.306222, -1.949908, -1.907760, 1.408615, -1.032219, -0.777444, -1.147053, 1.421870, -4.536372, -2.536196, 3.107574, 7.283909, 1.061354, 6.247738, 2.492666, -5.004519, 0.904036, 4.047616, -3.514202, 11.607808, 1.687278, -1.415314, 1.216097, 2.033355, -0.726293, -1.276973, -1.893091, -0.640427, -2.503571, 1.696139, -2.582898, 0.504075, -0.714980, -1.114511, 1.565292, -4.212490, -3.659786, 3.794113, 8.251262, -10.930680, -0.854877, 1.403714, 0.182559, -1.664759, 8.205954, 3.175723, 5.751433, -12.345897, -3.690118, 2.157351, -1.931254, -2.685014, 1.519460, 1.611310, 1.483691, -4.705966, 0.910489, -0.591503, 0.810909, -3.566463, -0.922904, -4.133647, 0.693398, -3.398308, -7.057076, 8.150643, 5.149821, -0.921952, 4.039984, -0.691125, -1.622174, 0.400555, 0.905017, -4.130281, 3.546237, -2.394435, 1.307221, -0.929193, 4.105279, 4.830922, -3.155889, -14.082384, -4.265063, -1.797958, -5.852248, -0.629024, -4.556770, 0.366079, -0.369934, -2.857206, 2.940807, -2.827428, -0.681767, 4.712918, 1.876945, -1.581710, 1.920033, 0.997621, 3.036944, -3.794464, 3.923013, 3.016330, 2.200749, 1.329625, 3.200423, -2.155055, -0.045490, 0.783951, 0.035822, 2.317424, -1.388349, -4.879757, 0.744934, -3.256709, -0.804720, 7.820601, 11.581930, -2.986958, 0.528871, 1.816532, -2.757020, 1.623767, 4.469307, -0.886595, 2.844160, -3.054128, -6.978522, 3.604878, 1.441362, 3.164423, 1.423137, 2.847337, 7.830255, -0.284583, 4.562284, 1.127404, -0.847715, -3.143726, -0.630776, 1.426530, 0.976097, 3.624685, -0.938379, -6.768874, -0.651724, 1.110691, 3.410451, -3.711426, 1.794142, 4.888909, -4.868391, 0.880074, -1.496938, -7.605275, -2.405980, -0.121072, -0.605833, 3.600265, 3.735211, 2.631172, 1.780346, -5.649825, 3.464066, 1.915498, -2.024964, 1.766772, 0.901463, 2.372195, 1.347422, -7.564970, 1.267246, 0.651467, 3.137627, 8.275804, 3.045985, -3.053710, 0.481985, -4.368095, -1.565027, -4.889802, -4.077986, -1.479097, -3.717297, -2.533653, 2.198562, -0.109395, -0.155248, 2.343427, -0.468254, -3.644486, 1.830998, -4.832081, 2.420253, 1.116277, -1.544990, 1.079973, 6.023523, -9.470542, -0.526658, 7.871091, 2.927501, 4.015531, -0.123413, -5.463810, 1.431790, -6.487438, -1.922884, -0.861428, -0.036432, 3.015611, 1.705245, 4.016040, 0.833335, -0.830895, -2.416893, 1.853543, 1.442106, 2.786620, -5.825129, -0.392634, -0.432307, -0.310255, -26.123421, -10.270937, -0.605280, -2.417578, 0.074350, 1.818826, -3.421947, 2.731808, -0.854174, 4.481255, 1.564091, -1.778919, -1.828312, 2.696471, 7.367148, 4.592233, 4.908090, 1.126599, -1.665563, -0.780767, 3.878027, -3.719045, 2.003561, 1.991732, -3.794286, 1.595907, -0.790974, -2.584416, -1.720546, 3.110297, -2.654614, -1.623367, 2.808341, -0.982782, -1.389468, -6.029696, -2.206492, 4.260851, -0.911281, 3.791123, -2.190428, -4.110178, 0.233069, -5.447139, -3.454870, -1.139729, 6.519983, -3.451788, 2.835515, -3.099709, 2.008346, -0.995626, -4.298965, -3.528227, -1.642498, -0.824614, -0.313984, -10.397189, 4.317847, -0.054997, 2.312208, 2.155351, 1.661524, -0.382523, -4.136478, 1.731175, 0.901455, -1.022260, 0.001772, -0.954372, -8.358492, -1.169786, -1.899708, 4.123806, 2.677999, 2.342832, 1.728150, -0.675461, -0.304928, -1.078242, -0.691467, 1.891031, -1.786393, -0.564162, -13.336782, -4.090851, 3.861676, -1.598557, 0.933130, 0.698961, -3.715008, 2.574142, 4.791670, -0.188703, 1.011477, 4.921413, -1.197381, -3.308120, 0.582081, 0.331216, -2.146964, -3.609845, -7.480417, 1.514748, 
-2.403366, 4.309848, 1.741206, 3.171790, 0.853578, -4.242896, 0.141156, -0.251999, -3.080003, 1.189685, -1.780310, 0.420805, 6.441176, 2.017550, 1.482109, 0.764820, 1.094513, 5.994189, -4.575535, -1.519281, -0.374821, -4.902756, -0.010785, -1.235264, -4.738406, -1.369686, -1.838703, 1.938332, -0.127818, 2.391236, -1.536056, -2.691419, 3.799701, 4.308792, -5.365365, -0.148878, 4.840180, -2.937357, -9.366514, 1.900619, -3.951562, -4.660779, -3.658832, -2.265880, -1.386611, -1.411259, -6.987273, -1.214803, -0.367818, 0.366117, -0.853615, -1.189381, -1.000214, -1.222832, 4.147648, 3.938485, 1.390094, 0.673034, -2.271415, 2.270089, -8.178805, -2.946841, -3.605500, 6.102495, -0.707714, 0.949894, 10.250167, 3.335208, -3.743140, 2.595495, 2.758212, 6.216015, -3.416305, -0.552264, 9.871416, -1.171531, -4.325271, -1.568509, 4.567388, -2.874155, 6.230402, 2.852392, 0.660041, 2.967154, -5.745966, 3.458155, 3.823436, -0.257130, -2.840716, 4.736637, -1.568797, 2.081702, -3.604527, -0.762095, -4.104626, -0.530010, -3.828546, 7.783548, 4.337686, -5.351483, 6.355220, -3.483813, -3.475105, 2.975876, -0.976615, -5.787482, -7.701257, -1.262574, 0.786711, -4.565193, 3.683280, -4.765723, 8.778955, 5.847812, 2.533564, 1.889782, -3.028593, 0.002415, 1.342857, -1.931820, -2.139125, -1.684002, 0.731061, 3.983507, -2.122521, 3.105117, -0.218142, -2.053858, 3.292550, 4.761068, 6.120069, 0.689729, 6.416157, 2.443371, -0.226030, 2.215002, -4.565110, -6.034309, -4.214690, -2.231462, 2.031740, -6.372641, 0.113933, -2.703660, 2.426773, 4.642745, -3.197048, -0.774607, 0.188611, 4.881577, 2.400140, 4.379480, -2.926288, -4.284175, 5.439656, 1.877821, -1.054169, -3.342324, 4.214714, 0.351784, -0.653580, 0.101221, -7.039636, -5.464549, 3.054862, 2.914082, 0.636254, -3.334089, -2.415467, -7.143037, -1.505419, 3.485719, -7.142840, -1.205475, -1.908036, 1.074051, 2.948884, 1.175272, -4.384288, -7.127125, -0.190361, 3.108279, 4.492822, -2.141525, -7.146518, 0.864536, -0.308609, -3.190335, 1.884518, -2.140036, -4.393816, 5.219053, -3.987568, 0.956274, 5.444406, 2.571494, -1.917619, 1.185638, -2.083780, 2.535819, 1.183141, -1.278997, -5.613052, 5.368552, -2.721700, -2.822282, 3.754762, 0.406334, 2.300485, -0.479974, 3.864220, -2.035357, -1.186809, -0.974274, -4.490512, 10.481380, 2.418972, -0.618924, 0.359614, -0.561789, -3.777701, -2.402584, 5.838708, 1.250841, -10.286187, 5.780018, 0.785446, 2.115176, -1.887090, -0.243522, 2.895624, -1.662971, -3.449104, -3.143673, 2.112755, 0.767009, -3.156452, 2.051345, -2.255293, 1.452958, -2.069252, 1.128322, -4.785990, 4.391029, -0.798171, 1.346062, 0.089269, -2.189576, 3.963068, -0.173651, 3.700278, -2.225943, 2.738409, -1.065400, 7.461505, 2.302134, 0.258173, -0.590334, -1.458263, 4.825621, 0.427273, -4.727118, -3.176454, -0.720255, -4.421970, -1.885394, -8.331471, -0.898200, -2.809368, 3.184264, -0.336558, -2.991859, -3.125097, 3.946863, -0.191780, 5.518604, 5.678664, 1.747232, 2.403631, 4.170382, -2.577474, 3.439462, 1.757885, 3.839143, 2.149831, 3.895440, -1.333357, -7.307757, -0.745806, -1.965429, -1.692224, -0.640169, -2.174121, -6.909765, -0.805699, -0.254699, -1.982787, -1.696193, 1.624363, -3.374143, -1.112641, 4.464109, 2.885069, 5.247915, 8.498940, -6.992555, 2.244478, 5.405650, 1.442541, 4.521284, 7.956601, 3.392406, 2.552805, 0.935176, 1.990296, -1.080453, -9.796060, 2.652627, -1.339259, -4.965900, 4.275986, -2.938723, -0.824551, 5.846297, 5.951045, 0.189309, 4.208355, -6.134343, -5.720004, -2.064939, -0.101766, -3.184957, 4.232388, -2.758948, 4.617232, 4.204010, 1.023525, 
4.624685, -0.723134, 3.129544, 2.440374, -1.593417, -0.517339, -2.116434, -1.257252, 1.141493, -2.265105, 0.470332, 1.526333, 7.134171, -8.550121, 0.918815, 3.052677, -8.046408, 2.606006, -2.745829, 4.694561, 6.510378, -2.790728, -1.683888, 0.710633, -3.552100, -9.357345, -3.631136, 4.263342, -0.859570, 0.555153, 2.379071, 1.508410, -4.484651, -2.065818, 0.476430, 2.149168, 5.995524, 2.064835, 6.370126, 4.561896, -2.605585, 1.645168, -6.452579, -11.317283, -0.358067, 1.925005, -0.196182, 8.038802, 3.608488, 7.714385, -10.653518, -5.490979, -0.465200, -3.811194, 0.012278, -5.339849, 3.102062, 1.361688, 1.574220, 0.267003, -3.299564, 0.730598, 2.265533, -1.941283, 2.960898, -3.787869, 2.582421, -10.016512, 2.954032, 3.424248, 4.218877, 0.929120, -2.192205, 6.571204, -1.742432, -2.160249, 2.296117, -3.335238, 0.620580, 2.971811, -2.652335, -2.441548, -1.402895, 0.231450, -2.608907, 1.946349, -1.566902, 0.134079, 0.737649, -3.240856, -2.809163, -5.268944, 3.230399, 1.785469, -3.767531, -2.465462, 1.129981, 3.577134, -0.362156, 0.332362, -2.061025, -4.494302, -8.171445, 6.567798, 1.388676, -0.058136, 0.309257, -4.941567, 1.235887, -2.312440, 4.221750, 1.207944, -2.247807, -2.391928, -1.510923, -1.211885, -3.429729, -0.122078, -13.750183, 1.711732, 4.617781, -4.478233, -1.037152, -6.811087, -2.053206, 3.017191, 4.490006, -2.499353, 3.093947, 5.947297, 0.928783, -2.458669, 4.793067, -2.253219, -1.427716, 0.447170, 2.260979, 4.120274, 2.983228, 1.871701, -4.150555, 2.458041, 2.845617, -1.781210, 4.040555, -9.113665, -0.372977, -5.971147, 4.331969, 1.156346, 3.464413, -0.917720, -2.057364, 1.338995, -0.613709, 15.391037, 1.301293, 1.529916, 0.432844, 3.230051, 1.065737, 1.038655, 6.989000, 5.168911, -2.805875, -1.107229, -1.116298, 1.466713, 0.670026, -2.802330, 3.990272, -3.455877, -3.597877, 0.457399, -1.454429, 3.925536, -2.317268, 5.982974, 7.452402, 6.825566, -3.050802, -7.312980, -4.084411, -2.939994, 3.978261, 2.160261, 5.109196, -8.970958, 0.779880, -5.015825, -1.141529, -0.537033, 7.575136, 2.682674, -4.574853, -2.686847, 4.847825, 1.805855, 4.005582, 4.142418, -2.148999, -1.202242, 1.485286, -3.422876, 2.369185, 1.520807, -3.121947, -2.290476, -2.816984, -2.241665, -0.188644, -0.776091, 1.388287, -1.727586, 0.995214, 2.813050, -10.039236, -2.676336, -6.747746, 0.699561, 1.167147, 0.672277, 0.510124, 0.887438, -0.437318, 3.920981, 0.440696, 0.791456, -0.086331, 1.989796, -8.740387, -2.689650, 1.919982, -2.022962, -2.553326, -1.109706, -3.103248, -3.047822, -3.717120, -1.876971, -0.950574, 2.495182, 4.061826, 0.882602, 2.778237, 0.607692, -5.894154, 1.629580, -0.082430, -9.960587, -1.067879, 5.680601, -3.571357, 1.432954, -1.086164, 1.989078, 3.262969, -0.808760, -1.806759, 0.225926, -0.216880, 10.205147, -1.013903, -3.004317, 0.238551, 5.560017, -6.469180, 1.412308, -2.933345, -1.669388, -2.127402, -5.822831, -3.841346, 1.505797, 2.863064, 0.047215, 3.476713, -2.252618, -2.842319, 4.505446, 3.452515, 2.608652, -4.584861, 1.434406, 1.214926, 0.571998, -1.643873, 2.164624, -1.891873, 4.223834, -3.363714, 2.083188, -1.403636, -8.511599, -6.646598, -1.559517, 3.051268, 1.154054, -7.887523, -3.502010, -7.295514, -2.090461, 0.076098, -5.151084, 3.241740, -0.180826, -0.464829, 0.837915, 2.880558, 3.190061, -5.721234, -5.494610, -4.584269, -1.480753, -4.052497, -3.668670, 3.334295, 5.760226, -6.020747, -8.985699, 1.530551, 0.829141, -0.523202, -3.265718, 3.966792, 2.476520, -2.400422, 4.657011, 4.518209, 11.938382, -4.791951, -6.905545, -8.331190, 9.066387, -0.555614, 2.061512, -2.402011, 
-7.306226, 1.855147, 3.911253, 4.643103, -5.161595, -0.178787, 4.647064, 2.532321, -3.979628, -3.421681, -7.873421, -1.694193, -0.977807, -6.005462, -2.696708, -2.635936, 1.249733, 1.558081, -1.054593, -5.677372, -6.848904, 12.375548, -1.048685, 9.451868, 0.770849, 0.004874, 0.749022, -3.343159, 2.868762, -6.788619, 2.329558, 2.230700, -2.796788, -0.746475, 7.273098, 1.725730, -10.017762, 3.896900, 1.245244, 5.167799, 4.762322, -5.168476, 5.321026, 2.704406, 3.907748, -8.864113, 21.655006, -2.555596, -4.513848, 10.126822, -5.159975, 0.338446, 3.130043, 7.764854, 0.964370, 2.991766, 1.586604, 0.347946, -1.534842, 0.742445, 5.535137, 3.090668, -0.835333, -2.897550, -12.371419, -2.731759, 1.227505, 7.692724, -3.102294, -0.357898, 2.924825, 1.412608, -3.414799, 4.227999, 0.096757, -7.042586, -3.655795, 4.494142, -5.498868, 9.541218, -3.288043, -2.688002, 5.775307, 3.201674, 4.313481, -0.658947, -17.299377, -2.143222, 4.300400, 1.396133, 0.652829, 8.997602, 0.394900, -3.026657, -3.423042, -2.098510, 3.731851, -1.618864, -3.534496, -4.114303, 0.777979, -7.618009, -16.085608, -2.674588, -0.311673, -7.138084, -10.863721, -1.879250, -5.479012, 1.067819, 1.009926, -4.360136, 6.908758, 0.637827, 5.021507, -1.089686, 3.845634, 0.777320, -0.520167, -1.586611, 1.657471, 3.628635, 1.447507, -3.954765, -1.489921, -2.505539, -3.786132, 2.921121, 7.683924, -4.154095, 0.724016, 0.943233, 1.858200, -3.725208, 0.990608, -7.652709, 3.203846, -4.016896, 0.687831, -3.904177, -3.953215, 5.366427, 6.889678, 6.707153, 8.301224, 9.560975, -1.522947, 2.084971, 0.079911, -3.623190, -6.227364, -3.239978, 0.914633, -1.201463, -0.285876, -5.000318, 9.624385, 1.343760, -0.730148, 8.851679, 1.207522, -0.340679, -9.117331, -4.930827, -1.469150, -8.287401, -8.861391, 2.041766, 4.612850, 0.045618, -6.774024, 2.442700, 3.126625, 2.628376, 4.009446, -4.584364, -4.274092, -0.898908, 1.249408, 11.149524, 8.727787, -6.110678, 3.100002, -6.880338, -2.373674, -0.941194, 3.267503, -4.888073, 6.996388, 3.436719, -0.600954, -2.654682, -8.906318, -5.176072, -1.354449, 3.938689, -5.618114, 5.184235, 2.933329, -8.471808, -0.485859, -3.493238, -2.416934, -2.808935, 5.411538, -2.704484, -4.772042, 0.483029, -2.779760, 4.672338, 3.290915, 3.207643, 3.355855, -12.556318, 2.987458, 0.511866, 1.037334, 4.601452, -7.491779, 0.815237, 1.669678, 25.821959, -2.990469, 0.341863, -8.546199, -57.959263, -6.861439, 7.197748, 3.185223, -3.890601, -3.807753, 7.488586, 0.738601, 5.186331, 4.751957, -3.189037, -2.101480, 2.811802, 4.692187, 4.650622, -0.388142, -0.563713, 4.468437, -4.456480, -0.434222, -1.902659, -4.143910, 5.255686, 1.149921, -2.154580, 1.160092, -2.100077, -1.200469, 2.620747, 0.045106, 1.217008, 0.872633, -1.279750, -4.014895, -2.671194, -0.317751, 72.818169, -0.274673, -1.522603, -1.679586, -1.681878, -1.658200, -1.187360, 0.330624, -3.366315, 3.558348, -7.375103, -0.864948, -5.678682, -2.768099, -4.980128, -2.315590, 3.863862, 3.874607, -0.397576, 3.207895, 1.091891, 1.793154, 0.884546, 10.288604, -1.982587, 3.128799, -2.658461, -6.311664, -10.789027, 11.487055, 1.278287, 0.116743, 2.171052, 2.254938, -2.452972, -2.434953, 0.847673, 7.682579, 1.807902, 4.112731, -4.216558, -4.923490, -0.496755, 7.823430, -3.785221, -2.627351, -2.087911, 2.650225, 5.552239, -5.799509, -1.966866, -13.046121, 3.335694, -2.779527, 8.939444, -21.493067, 3.353523, -0.808833, 1.012164, 0.383040, -1.610640, 2.471483, -3.263705, 2.747091, -0.280259, 2.336903, -6.732734, 1.303764, -1.920047, 5.165965, 0.111381, 5.318671, 0.422425, -0.942677, -4.163383, 
3.023600, -0.684197, 5.766990, 0.311750, 1.133976, 0.124674, 0.795275, -0.862553, -2.777301, -3.046375, -2.968873, -4.754703, 0.717553, 6.495907, 6.162051, 0.042087, -0.678317, 12.963668, -3.955312, -11.636727, -0.769162, -5.909076, -0.539896, 6.502483, -0.103502, -1.372625, 2.357192, 1.925273, 1.399435, -5.294392, -2.933077, 2.778971, 2.272954, -4.688748, -3.429728, -3.411602, 3.354655, 4.497555, 6.116796, -4.520513, 0.076594, -47.672466, 0.073998, -6.041132, -2.421984, 2.042518, 4.413757, -2.072258, -1.666935, -3.535879, 1.346258, 0.478230, -1.063917, -7.310431, -7.991882, 0.030024, 1.886322, -8.401767, -1.692304, -0.510863, 3.041207, 1.225550, -2.396562, -2.626087, -0.229872, -11.108354, 0.600951, 5.914285, -0.088484, -2.257015, 1.889441, 4.506021, 4.414992, -9.895139, -4.587586, -1.472980, 12.623730, 2.343815, 1.844113, 0.155746, 0.860875, 8.530779, -3.720360, 5.726038, 1.089025, -0.996229, -9.634427, -11.579075, 4.942333, -2.901795, -2.754212, 1.061458, 1.780601, 6.769283, 5.579513, -4.578234, -1.976412, -0.174598, -4.009778, 8.133586, 3.975948, 0.620623, -3.166523, 3.254071, -5.477593, -1.372344, -0.128217, 2.143379, 0.711423, 0.181416, 4.484520, 1.924787, -3.877091, 1.482431, 3.593784, 3.742536, 1.208023, 1.879081, -0.047399, 2.581715, 2.983128, 4.446198, -3.142582, -2.353244, 2.860869, -1.478992, -0.129871, 0.284226, 0.876769, 4.556657, 2.544342, 5.677957, 6.531505, 5.851432, 0.441063, 1.460388, 3.379210, 3.498836, 3.946058, 3.750448, 1.786243, -4.732514, 8.103845, -0.957042, -7.004573, -2.709820, 4.072583, 4.511800, 2.521903, -6.146012, 5.279287, 0.575812, -1.507474, 4.029167, -6.929080, 4.972468, 6.486836, -1.219782, 0.701163, 5.481197, -0.645679, 5.140512, -7.023633, -4.284097, -6.846494, 4.500613, -5.177200, 1.924386, 3.206387, 7.535226, -0.964616, -0.910353, -0.939528, -0.022958, 1.235824, -4.868237, -5.670794, -3.356471, 1.502983, 3.786366, 3.423632, -0.402663, 2.183487, 2.224744, -1.873978, 3.550165, -2.142070, 4.959284, 2.750160, -8.151263, 2.305051, 1.590026, 6.649673, -0.462925, 13.728370, 5.043933, -3.782001, 0.586798, -4.597447, -3.100800, -1.378564, -1.036960, -4.756698, 3.392843, 0.098122, 7.496067, 1.276794, -2.201010, 12.045688, -2.050238, -2.662544, -4.651062, -5.942354, 0.904822, -15.855501, -2.120624, 1.871660, -3.271507, 5.968053, -11.749590, 5.864941, -5.111817, -0.975109, 1.543735, -1.594194, -6.976959, 3.590129, 4.134421, -11.131968, -2.028036, -6.524039, -4.008910, 4.039269, 0.079027, -4.533221, 8.506838, -1.361423, 1.549692, 3.190930, -0.073228, 3.400214, 1.610538, -1.334574, -8.067868, -5.087399, -2.729920, -0.373806, -1.636357, -3.642413, 2.538674, 4.916176, -3.533566, -2.162897, -1.618924, -4.204897, -5.115014, -3.544823, -1.597110, 0.494041, -2.240284, 4.748669, 9.349570, -2.634223, 4.627161, 1.736369, -1.277352, 3.055332, -0.612900, -5.664233, -7.366509, 1.622354, 0.968671, 10.669295, -8.042500, 10.230959, 3.214252, -4.011172, 6.232887, 3.857274, 1.902077, 0.706732, -2.590447, -5.315027, -0.226375, -3.639945, -4.291627, 2.910607, -2.851480, -2.199380, -5.348783, -2.055163, 12.780025, 2.655058, 6.954868, -3.661295, 2.167846, 0.434934, 0.436317, -5.767696, 2.771549, 5.062878, -0.617042, -8.748747, 6.545790, -0.899979, -1.069733, 0.529967, -5.809291, 0.556308, 6.874477, 5.277372, 4.169679, -5.629921, -0.249756, 4.796238, 7.292950, -4.699057, 11.379167, -2.491559, -1.708757, -9.385721, 3.035690, -3.613850, 11.371363, 7.384349, 0.989243, -2.333371, -5.939830, 0.762153, -0.475695, 4.556848, 2.328523, -7.775479, -2.024576, 8.063655, -7.785150, -6.127687, 
-1.686662, 4.035010, -4.927999, -1.375932, -0.560970, 2.860914, 1.378267, 2.240504, 5.007919, -4.354797, 3.506239, -0.676417, -3.054658, -1.389955, -4.676187, 2.296721, -3.969944, 0.529669, 7.909115, -4.237406, -0.696890, 3.972103, -2.716695, 5.140221, 0.518986, 3.473999, -2.443658, -2.689824, 0.467211, -7.972925, -4.338316, -3.179682, -4.076129, -0.004530, -3.895267, 8.064905, 7.706927, 3.333316, 2.809827, -1.146712, -0.130314, 0.694602, -1.394667, -1.133426, -4.963870, -3.307344, -2.904934, 3.678267, 4.123675, -12.262660, 5.079062, 2.435183, -1.666260, 7.668711, -5.765028, -0.798359, 4.794229, 6.620828, -3.358037, -2.250828, -1.557810, 4.582862, -6.650002, 0.642422, 7.357230, -0.317707, 2.141603, -2.768049, -0.753847, -0.552432, -0.858747, 6.015222, -5.603873, -1.312301, 1.108734, -3.475725, 4.387824, -0.271075, 6.087353, -2.430286, -3.067877, -6.732006, 0.182969, 1.136600, -7.581763, 7.243490, 3.132981, 1.896595, 1.194428, 6.584229, 7.422012, 2.498844, -5.440595, -0.854828, -1.774024, 0.406536, -0.738175, -9.008422, -1.769468, -4.130078, 7.411235, 0.241957, 1.977909, -0.985168, -2.942751, 2.039368, -6.933027, 3.331693, -7.329825, 4.314275, -7.256344, 3.564003, -1.318340, -6.307499, -2.822379, -2.331321, -0.687384, -0.432042, -0.330118, -2.285890, 3.763629, -5.730393, 1.598254, 1.966150, 4.906727, 3.835798, 4.303470, -1.755081, 1.831058, 1.456044, 5.163886, -3.070241, -4.535691, -5.533028, 7.793821, -0.958387, 0.161610, 4.340654, -0.431025, -5.204406, 1.275669, -8.160251, 2.356874, 1.587846, -0.244014, 0.993669, -3.694689, 1.563022, 5.399471, -5.073840, -3.126203, 0.948787, -4.177573, 3.693822, 3.871990, 6.277728, -1.348473, 4.640944, 3.815053, -0.938187, -1.165718, -0.180555, -4.312121, 1.375381, -10.583527, 1.926107, 5.039911, -3.980606, -1.254809, 3.968797, -3.856108, 0.365484, -5.768256, -3.118121, 1.625351, 4.106509, 5.812869, -6.065194, -5.483398, 3.973883, -1.708879, 1.518483, -0.220917, 1.255598, -4.182369, -6.526254, -2.248013, -1.433254, -0.696151, -3.830321, 0.127961, -1.540217, 2.604615, 1.242860, -4.627748, 0.283364, 6.120577, 6.530550, 2.017253, -1.432310, 1.281243, -0.537177, 2.646643, 3.722222, 0.557243, 1.116598, 3.271622, -4.565522, -0.453311, 3.175609, -1.267959, 3.857802, -0.692382, 5.890440, -1.637853, -2.580369, 1.959585, 0.735434, 8.119528, -4.732760, -4.281412, 0.511573, -0.933117, -1.127330, 15.441397, -1.013427, -1.233577, 3.462357, -1.734491, 0.680633, 2.449418, -0.234776, 5.454259, -4.323566, -3.023840, 6.850878, -0.942867, 5.765671, -3.467015, -6.242991, -7.451763, 2.945967, -5.463686, 0.189843, -1.386753, -5.821880, -2.037198, 3.015718, -8.932395, -1.388764, 1.455249, 1.704442, -6.780524, 3.084629, 4.529232, 0.981996, 1.693487, -0.297318, -1.638915, 6.350910, -4.167922, -1.515015, 7.637380, -2.958383, 4.897805, -7.789161, -0.392564, -1.537700, 0.858776, 5.029025, 2.858186, -1.974044, 7.591927, -0.049241, 2.494700, -0.047159, -2.806561, -0.060406, 3.263311, 0.360731, -0.459208, 0.966681, -2.915567, -3.130369, -0.622113, 1.856929, -7.741611, 2.707514, -0.712287, -3.967048, -5.445321, 4.626101, -0.967011, 8.001504, 0.901099, 2.290556, 2.533530, 0.057316, 3.366540, 3.716669, 8.090490, -2.582757, -0.333340, 4.146890, -0.922499, 3.378407, 3.768361, -4.073958, 3.286473, -5.570380, 5.806504, 1.360253, 0.565697, 0.608026, 7.096983, 3.965935, 2.117180, -1.423239, 1.241112, -3.526773, 6.318761, -0.442196, 2.996957, 5.560712, 12.985066, -4.835479, 5.019740, -4.981246, -4.116704, -2.050514, -1.667424, -4.875980, 1.503455, -2.008132, -2.906037, 4.770177, -0.976920, 
2.429173, -0.911758, -2.752666, -3.897050, 5.040503, 3.460028, -24.602287, -5.098180, -6.328380, -4.805958, 1.553041, 3.066551, 0.216922, -1.931856, 0.310250, 2.446584, -3.422905, -6.960899, -4.071479, -7.258070, -0.790623, -6.343513, 0.978405, -5.713193, -4.039086, 6.117209, 0.956974, -5.267104, 5.572167, 2.808410, 0.274577, -5.325370, -4.834610, 1.844393, 10.592550, -3.991334, 3.880352, -7.561521, -0.833719, 0.902660, -1.233636, -4.647372, 1.025466, -2.701486, -1.585116, 0.601460, -5.395946, 0.895798, -3.280932, -1.410241, 0.349303, 5.957426, 4.516736, 0.785583, 2.897660, 0.106632, 1.949055, 3.427114, 2.503825, 2.539254, 0.021046, 1.653128, -1.125087, 4.093799, -0.777973, 4.705805, -0.412391, -4.549866, -1.732447, -0.666627, -3.793111, 4.220653, 3.186419, 0.723626, 2.166250, 1.754366, -1.169123, 2.267691, 4.660487, 3.483028, 6.041733, 0.969664, 5.984068, 2.085470, -3.209600, -3.176646, 5.158224, 4.955115, 5.085187, 1.528159, 2.763774, 6.905697, 1.378222, 3.018082, -3.203681, -5.697320, 9.362370, -3.822440, 0.687714, 7.065468, -3.620854, -1.068926, -0.659893, 6.244454, 0.356127, -1.408702, 3.371244, -3.360366, 0.834484, -3.404024, 7.210449, 3.206043, -6.253275, -3.466505, 7.853566, -2.665757, -0.684967, -3.988544, -0.479689, 5.326033, 2.021157, -0.206972, -0.836225, -9.419202, -1.627847, -1.403879, 1.130102, -1.728503, 5.926775, 2.344648, -5.166494, 0.622071, -10.816072, 0.119801, -7.232834, 1.983062, 2.584320, 1.949506, -0.408435, -1.867195, 4.516873, 2.080892, 1.941095, 0.494597, 0.089801, -4.194734, 1.500237, -5.619621, 1.784577, -0.276579, 9.975770, 0.117329, -8.132631, -0.803383, 5.854777, 9.448901, -10.612700, -0.738503, -0.731102, 1.529682, -0.489255, 6.687905, 3.029937, -5.183012, -0.238438, -4.780541, -0.097051, 4.321120, 0.305233, 2.534451, 0.759206, 1.449946, -7.284632, -0.724230, 5.051780, -1.913272, 11.689046, 1.806538, 4.965152, 2.657064, 3.413854, -5.321180, 2.220038, -0.484918, -1.116065, 3.195233, -3.543233, 2.873127, 0.734757, 3.213889, 3.447571, 0.588481, -0.118765, -2.447142, -5.046257, -5.135880, -1.192211, -1.002373, -1.340682, 0.807711, -2.720257, 1.137674, -1.123838, -5.477874, -2.690656, -1.423756, 1.283039, 1.119945, 1.476256, -0.959476, 0.120415, -0.333202, 5.809803, 4.728014, 4.327016, 5.169444, 0.794055, -4.507209, 0.095459, 4.366117, 2.586082, -3.219173, -6.210899, 1.348389, -2.038010, 3.369182, -2.846389, -5.263379, 3.744565, -1.082742, -4.733845, -2.851035, -0.690051, 1.126368, -5.753883, -1.434126, 0.639843, -5.872741, 2.279959, 4.675051, -5.324467, -10.310712, 1.810941, -6.226061, -1.814752, 2.493179, -0.650456, 1.417511, -1.273579, 5.576836, 5.641691, -0.388570, -0.676326, 2.536535, 7.995044, -3.511673, 1.615828, -2.204821, 3.207858, 6.377557, -0.962587, -3.265275, 0.799848, 4.604529, 1.244043, 0.650347, -2.098049, -1.020897, 9.456820, 0.773542, 0.310306, 5.469886, 5.460999, -0.805781, 1.347621, -5.537677, -0.655692, 2.221136, -3.225057, 0.772083, 0.606927, 6.593863, 6.725516, 4.217551, -0.319320, 0.378998, -9.116841, -12.747993, 2.226322, 3.811626, -0.126707, 2.969726, -1.477451, -0.378593, 3.390436, 1.826288, 2.390730, -1.385699, -4.330629, -1.510868, -0.545701, 3.081455, -1.118920, -1.966406, 5.322524, 0.613941, 2.093357, 4.134116, -2.701830, -0.509763, 3.020345, -0.263120, -6.160770, 5.853301, 4.288284, -5.511547, 2.915318, -3.539979, 1.122932, -3.405716, 2.185867, 1.715794, -3.666327, -3.478908, 0.295895, -2.676396, -2.359107, -1.866933, 0.797962, 7.096612, 0.064292, -2.482887, -1.964745, 1.174466, -1.427787, -0.547333, 2.101931, -8.511395, 
0.405700, -3.375213, -3.197227, -2.439764, -1.360264, -2.913413, 3.843259, -2.007440, 3.775483, -4.897901, -1.232008, 1.066024, -3.550373, 1.529685, 1.323544, -2.433049, 0.770832, 2.926789, 2.587296, -6.737079, -0.852933, -0.235982, 2.178667, -5.808867, 1.977460, 2.142372, 3.881558, -5.669499, 1.582753, -4.487857, -2.140340, 3.822457, 3.156929, -0.784372, -11.919968, 2.732753, 0.720352, -1.074247, -2.517467, 0.212668, -2.007519, -5.329519, -5.339283, 8.835780, 2.035633, -5.028576, -0.937121, 2.151216, -3.267512, -1.287972, -1.488874, -0.354031, 0.224125, -1.105438, 1.056294, 6.083877, -0.475878, 5.149827, 1.604487, -7.690906, 0.559727, 5.628377, 1.652162, -3.740532, -5.827209, -2.290152, 0.813187, 4.807639, 0.445966, 3.805639, 0.770459, -1.839797, 1.350892, -1.814946, -0.847039, -3.721893, -2.621535, -4.462288, -1.348650, -2.237405, 0.306764, 0.791332, -2.083073, -2.704147, 3.735518, -4.753408, 1.343448, -0.313119, 2.784453, 2.194219, 3.320808, 2.609183, 1.682797, -1.709125, 0.583227, 0.166908, -6.052476, -3.839477, 5.245582, 2.119853, -4.421228, 3.879004, 1.128520, 0.561174, 1.135597, 1.555886, 5.035879, 2.007892, -2.588021, -4.674111, 2.013049, 1.186565, -0.874717, 4.819943, -0.970881, 0.407302, -3.039956, 1.841424, -1.702842, 6.212122, -4.744252, -4.397005, -0.945604, -4.804412, 7.296845, -5.887240, 4.321653, 1.352454, -8.227689, -3.597377, -1.649933, 5.057769, 4.388936, -5.530170, 0.288266, 3.812274, -0.723873, 2.812019, 10.936550, 0.128492, 3.857232, 1.627732, 2.630692, 4.487742, -3.620348, -2.386631, -4.531271, 9.979341, 3.510550, -0.302120, -0.265527, -2.551227, -6.043945, -2.087475, -3.116405, 1.148862, 0.456963, 1.133632, -2.793258, 1.727798, -4.147748, -0.056471, 1.409196, 0.633240, -0.925451, 3.875555, -0.373213, 1.008518, -0.027705, 0.011419, -3.924354, -1.880724, -2.316895, -1.747171, 2.857554, -7.953085, -5.626114, 1.550129, 4.746271, 4.696819, 0.240030, 2.531980, 15.833130, 0.971987, -4.606957, 2.627638, -2.664799, 3.263517, 1.898216, 3.809264, -1.514982, -5.362910, -0.347393, -3.474900, -0.638306, -0.228928, 0.702787, 0.271248, -1.160489, 0.159041, -2.044232, -0.764671, 0.094077, -2.514994, -0.532551, 1.893106, -4.758446, -4.132962, 2.440339, -2.551711, -2.038148, -1.968286, -0.260521, 0.393995, 0.527462, -3.963587, 1.928428, -2.928052, 3.469846, -0.296801, -0.104693, 0.559026, -0.312715, -1.335794, 0.120195, -0.155378, 8.477010, 2.909681, 1.191940, 0.912483, 1.678987, -1.485329, 2.019185, 0.006450, -0.548562, -1.414819, -6.675617, -3.839057, -10.587205, 8.054903, -0.601675, 2.120443, 2.867933, 0.452214, -0.157625, 7.559044, 0.066763, -0.346538, -0.679744, 7.143521, 2.798456, 4.390249, 0.547103, -0.828780, 0.894876, 3.495336, 0.999051, -0.162726, 0.927784, -0.997106, 5.079284, -2.757908, -2.823677, 0.840591, -0.101002, 0.895782, 0.422564, 1.383721, 3.118566, 3.713212, -1.116651, 0.350085, 3.952912, -1.482127, 3.851310, 1.132925, -0.018721, 1.863041, 0.292804, -2.271758, 3.700714, -1.889969, -8.257089, -1.723338, -1.162702, 2.041800, 1.053195, -2.891562, -18.913019, -1.537845, -4.574075, 1.734250, 4.120369, 0.549128, 5.679949, -1.484915, 5.160435, -0.268740, -1.555882, 0.922197, 3.002130, 0.116955, -3.772360, -1.805371, -0.604614, 0.137429, -3.511200, 0.931349, 0.355020, -0.204878, 0.550382, -3.529668, 0.334168, 4.264834, 5.036866, -1.105709, 2.041065, 0.835190, 0.624881, 1.791567, -3.434286, 1.122225, 0.606482, -2.021971, 1.331716, 3.309259, -1.130233, 1.147773, 1.803062, 0.061684, -1.037129, -6.389064, 3.572050, 0.107109, -2.976307, 1.539801, 0.022954, 0.191456, 7.381106, 
-2.153169, 2.224567, -2.784935, 1.598683, -3.104459, 1.023445, -2.912184, 0.103114, -1.707224, 0.194435, 0.985669, 1.219664, -18.701944, 1.770715, -0.215786, -1.886590, -1.313887, 0.379953, -0.811773, 1.726949, 1.761581, -6.506214, -0.970271, 2.834483, -3.703697, -3.972107, 0.371798, 1.186142, -4.292978, 0.112968, 0.986375, -0.553960, 0.242243, -0.866809, -1.040570, 4.414179, 2.866549, 3.117313, -3.775796],
+	"internlm2:latest":     [5.061247, -0.231503, 2.588457, 1.067168, 0.639063, -6.508947, 2.713553, -4.196918, 1.201719, -3.954562, 0.397499, 0.665700, -0.373104, 3.420092, 2.525895, 1.166159, 0.135273, -1.422993, -1.334097, 0.629137, -7.037499, -1.594845, 4.286449, -3.437519, -1.638770, 4.078342, -1.257553, 2.342320, 4.363889, -2.744817, 1.405284, -0.473245, -1.080851, -2.587262, 0.633063, 0.464861, -0.259242, 2.882139, -1.092318, -2.586674, -1.025380, -4.536989, -3.841810, -3.341502, 5.938951, -2.299690, -1.749035, 1.184345, 5.063748, 0.792942, 1.243212, 5.516513, -2.790794, -1.276075, -4.534899, 3.715642, -3.791056, -5.819790, -0.919605, 1.451963, 0.649228, 0.343269, -2.556309, -4.416265, -0.184799, -1.028780, -1.509403, 3.096626, 4.423926, 1.392064, 2.056307, -7.209401, 0.257314, 0.226362, 5.174077, 0.345125, -2.266287, -2.005017, -3.846150, -1.313890, 4.227207, 6.955594, -4.165278, 0.299796, -3.245447, -0.149341, 0.321417, 0.187456, 2.233084, -3.411223, -0.836206, -3.420860, 2.934996, -9.119430, 0.097401, -4.268331, -2.307719, -3.476908, 2.335415, 1.019653, -0.575591, -1.621845, -3.839381, -1.752010, 0.723238, -2.752960, -1.486039, -2.191680, -3.217196, 1.473283, -4.561637, -0.163969, -1.542991, 0.560197, -15.695994, -1.109497, -1.806454, 1.372928, 1.925100, -8.137605, 9.525015, 5.316425, -4.975033, -0.825283, 0.438482, 0.641941, 2.960293, 3.527466, 1.868934, 2.090116, 0.798059, 2.660561, -5.010262, -3.538967, -2.643612, 6.393401, -2.147769, -2.526807, 2.325527, -1.141852, 0.187074, 4.366103, -1.343357, 2.663101, -2.388203, 7.109671, 1.708573, -3.242779, -0.175443, -1.789128, -2.733232, 3.111445, -0.469209, -8.575769, 4.222897, -0.028589, 0.682822, 7.332674, -5.071147, 4.716674, -0.557671, -1.645178, 1.872882, -0.017428, -0.709880, -5.856006, -0.376602, -1.407626, 6.218358, 2.250383, -2.505142, -2.895295, -6.551757, -5.177249, 24.057529, -1.202249, -7.279227, -6.273926, -1.637197, -1.717246, -1.007484, 4.553564, -4.807058, -0.805701, 4.396732, 2.509762, 0.401592, 3.622766, -1.253119, 2.561592, -27.404514, -0.643606, 3.348947, 1.838143, 3.371135, -1.500990, 2.856364, -3.396603, -1.188622, -4.619108, -1.153197, 4.663440, -2.009603, 0.344214, -3.262805, -1.938819, 0.726469, 2.213094, -4.225058, 3.463687, -0.537457, -6.544409, -4.219234, -1.230688, -2.658906, -0.808681, -0.051084, 0.944790, 2.601959, -3.345125, -2.134279, -6.127693, 5.400606, 1.405114, -6.286226, -5.871374, -4.326901, 0.939626, 0.459327, -2.888148, 0.964913, -3.523564, 3.792453, 7.624001, 2.997477, -3.895889, 0.994956, -5.825964, -1.577850, -2.738399, -3.672004, 8.851274, -2.867324, 5.217916, -1.113662, -0.199251, -4.990279, 2.785989, 0.925950, -2.605497, 5.271828, -1.838495, -6.714372, 2.304076, -3.585959, -3.600795, 3.694057, -1.135462, 6.889727, -0.454378, -1.975031, 6.046408, -3.825401, -1.946825, 5.755115, -9.280190, -3.701879, 2.500824, 1.049162, -1.036308, -2.931208, 0.655454, -0.526017, 0.155298, -4.066710, 5.787239, 1.007156, -1.677976, -1.923831, 3.971688, 10.829790, 7.744619, 7.314122, 28.896027, -4.738491, 1.426370, 0.738208, -5.452198, -7.496375, -3.499172, -15.695760, -5.750883, -1.174744, 6.486292, -0.237689, 1.870942, -2.951409, 0.658425, -0.447866, -0.076158, 3.413381, 1.638950, 0.280836, -0.231159, 2.032212, 2.940922, 0.591556, -1.669960, 2.941601, -3.021569, 3.658772, -0.426081, 0.865577, -2.619977, -0.916667, -4.567581, 6.830680, -1.625737, -5.243823, 2.782119, 3.282244, 1.116829, -0.427940, -0.300249, -4.410540, 1.400093, -1.888339, 2.741519, -4.364941, 2.187036, -1.132823, 1.077472, -3.309915, 
1.454134, -4.608842, -2.445352, -2.925962, 1.095848, 2.496021, 1.057507, -1.029777, 0.220468, 0.362259, -2.373489, 0.882088, 3.134756, -1.146141, -7.990649, -1.160275, 0.380594, 6.351787, 7.281243, -3.807301, -3.881965, 2.582957, -6.088000, 4.956009, -3.621352, 2.759395, -2.421424, -2.243220, -6.692789, 1.947197, -1.775349, 4.495222, 1.503652, -0.909473, -0.653658, 8.284077, -0.876597, -0.136130, -0.970865, 4.287241, 5.301243, 4.046627, -4.381618, 2.058830, 2.515426, -1.943246, 2.832242, -5.611320, -0.062267, -5.000455, -3.713669, -7.901437, 1.488344, -0.372438, 0.134102, 2.772597, 4.079040, 1.760607, -1.572206, 2.533317, -1.507193, -0.443952, 4.203324, 0.275423, 4.427946, 1.618125, -0.390027, 2.804054, -4.883042, -1.828983, -2.090811, 4.276200, -0.214048, 2.147459, -0.309282, 0.956251, -5.969341, -2.051353, 4.603029, -0.944236, -1.789566, 0.084430, 2.212379, 2.903875, 2.756856, 1.757860, 3.429894, -8.464258, 2.010800, -4.168521, 0.476422, 8.876191, -0.046917, -1.662317, -8.808737, -0.096040, 1.565808, -3.285136, 4.959125, 1.565729, 1.549600, -2.985832, 5.915885, 0.455242, 4.394282, -2.599325, 0.458896, -0.404525, 1.149395, -1.969155, 2.687780, 1.759553, 1.115179, 3.094874, -3.141999, -4.941475, -4.853149, 1.448547, -6.194538, -0.578595, 7.831284, -1.984477, -4.167677, -1.365347, -2.300270, 1.228716, 1.456013, -2.201469, 1.595850, -5.419332, -2.410420, 0.789430, -0.525872, 0.054790, 3.655850, -2.747402, 4.233346, 3.877509, 13.040668, 3.115983, 7.328908, -4.238483, -0.252896, -3.893379, 0.590240, -1.978889, -0.209061, -1.853956, 0.903654, 1.213564, 0.919549, 13.728156, 0.465422, -4.338054, 1.460130, -0.295520, -0.555089, -3.129691, -4.171381, 1.837397, 5.976922, 3.015670, -4.032254, -2.745596, -0.310737, -3.200476, -2.131331, 1.340346, -5.796060, 2.511221, -3.079774, -2.012100, -2.608340, 0.969537, -3.736783, -1.355915, 2.013482, -2.848377, -5.723270, 5.538749, 4.389459, -0.111431, -1.020809, -3.971606, 3.871086, 3.663780, -2.874661, 4.710732, 3.333759, 0.691214, -0.371512, -1.434366, -0.555875, 1.876416, 1.818450, 3.601551, -2.531006, 1.163709, 3.509557, 5.839750, 3.703716, -1.823416, -1.795103, 0.759579, 1.348970, -2.568928, -4.570304, -4.493628, 1.472984, -3.155058, -0.319365, -2.713835, 1.905564, -0.663457, -1.327655, -2.407827, 6.360357, -1.690539, -0.995528, -3.078851, 3.296104, 2.338831, 0.252414, 1.314875, -0.129766, -3.652116, -5.463096, -3.035827, -1.872913, -0.902017, -4.781173, 1.147719, -5.484653, 1.522828, -5.870281, -0.748309, -0.013227, -0.942376, -0.041075, 5.888425, 5.395775, 3.727763, -4.747180, 2.796907, 4.980431, 4.589695, -3.617343, -1.490041, -3.500859, 0.410755, -2.534751, 0.081086, 0.969224, -0.670146, -8.614530, 4.209755, 2.025898, 3.765331, 6.290065, 1.974810, -6.858045, -4.255695, -0.630691, 0.429667, -1.628322, -0.191547, -2.538192, 4.177691, -1.218796, -3.781093, -1.958904, -3.477312, -3.877017, 3.846811, 3.581842, -0.137654, -3.981176, 5.399758, -0.110485, 3.515115, 0.680053, 0.578790, -2.271068, 3.598449, -0.227658, 4.929263, -4.807483, 4.248407, -2.731860, 2.104155, -14.894735, 4.504198, -0.278473, 1.659851, 0.397226, 0.353838, 3.300556, 3.875627, -3.460070, 2.003856, 1.920597, 7.600002, 3.218442, -3.740824, 7.104123, 1.948618, 4.355168, -1.856144, -1.128358, -1.529000, 1.024140, -6.831550, -2.043998, -0.117732, 4.418821, -2.080518, -0.038392, 1.616336, -2.526685, 2.505723, -0.155806, -2.213610, 5.785298, -3.473212, 2.725080, -1.891800, -2.262702, -2.161195, 6.359688, -9.396100, -0.280296, -2.672043, -2.741757, 4.177962, -1.655606, -1.932398, -2.343269, 
-0.081301, -0.269365, -3.257321, -5.578602, 2.227818, -0.747868, -1.603817, -0.781458, 1.857280, 0.701760, 3.020540, -1.193380, -2.049258, -4.239274, -0.413911, -3.666025, -3.096007, -1.027318, -3.412259, 2.208090, 3.525542, 3.201124, 1.783430, 3.468514, 4.684199, -0.899601, -1.940475, -2.798301, -4.269429, 0.962290, 1.554736, 1.775746, 3.519588, -5.483161, 0.666180, 0.557270, 2.279576, 2.197963, 0.786984, 1.949886, -1.663766, -4.385033, -2.131468, 1.939933, -4.297215, -6.042433, -6.853386, 0.457582, 7.823955, 1.367757, -5.069343, -0.526767, 0.987899, 0.377375, -0.224402, 0.841607, -4.236011, 1.756751, -5.956668, -0.028805, -0.920623, -1.493528, 4.230742, 0.819575, -1.189212, 5.872002, -1.270913, 3.005222, -7.878338, -5.408515, -1.306229, 3.037759, -2.493772, -1.165590, 0.097988, -4.550385, 2.095839, 2.516308, -1.947471, -0.486930, -1.384863, 3.929041, -7.500520, -4.048054, -0.561791, 3.225979, 3.488237, 4.773482, -0.358355, -3.923441, -0.149555, 5.512856, -4.676888, 4.938262, -0.932411, -5.136211, 1.462612, -6.052265, -2.085773, -2.355224, -6.450393, 6.424675, 0.994802, 2.546335, 0.492838, -2.865380, -4.151785, -0.373860, -0.992486, -3.127932, 1.637338, 5.028770, -1.532269, 3.610800, -2.120996, 1.867085, 0.881059, 1.636464, 6.450935, 1.735827, -1.597024, -0.939140, -2.820829, -4.151963, 1.126840, -3.318565, 2.201536, 3.435789, -3.675163, 2.943386, 0.250281, -0.914511, 6.706475, -0.012139, -5.633416, 0.969529, -1.803715, 0.228366, -1.060118, -10.121710, -0.687450, 0.544276, -3.080676, 0.082196, -3.262676, -2.902545, 2.796925, -60.507057, -2.543181, 1.720342, -1.325740, 1.355423, 4.610939, -0.366430, 3.924400, 2.029431, -3.024478, -12.526103, 1.520504, -0.113020, 3.471127, 1.196942, -1.564157, -0.478866, -0.093020, -0.736989, -2.370159, 1.155157, 11.948485, -2.660467, -1.938226, -2.650690, -0.567669, -6.025421, -0.747833, -4.784255, -1.114224, 1.280174, 0.511942, -0.691434, 1.220410, 3.965637, -4.907683, 3.453216, 5.431615, -6.192630, 1.855827, -0.527852, -3.460788, -5.247286, 2.145873, -8.107170, -3.359477, 2.195966, -3.420030, 0.526939, 2.802132, 0.360952, -8.053123, -5.381207, -2.780623, -3.736796, 2.322341, -2.283828, -0.540574, -5.767483, -2.334425, 0.176996, -2.189201, -3.760068, -0.451196, 6.214261, 3.779209, 0.545904, -4.201845, 1.978366, 1.278257, 2.698078, 2.789446, 2.211696, -0.907324, 9.129786, 1.447129, 3.216690, -0.990769, 0.716541, -6.102333, 3.532319, 0.661573, -2.552234, -0.664280, -0.795222, 3.755283, 3.302111, 3.283321, -3.685587, 0.687869, -0.329220, -3.572280, 1.765342, 1.272334, -6.889939, -5.411784, -1.632199, 3.425769, -1.347378, -3.850362, 8.801662, 4.775460, 12.205454, 1.638725, 3.274867, 0.039696, 2.016362, 1.419244, 1.617624, 4.213040, -10.281228, 4.154506, 2.621370, -1.467056, -2.441974, 2.889356, -7.528337, 3.254175, -6.855349, 0.325416, -0.511460, 7.052696, -3.282471, 3.773828, 2.874611, -2.059987, -3.625528, 0.191727, -0.481927, 7.968199, 3.430549, -0.548765, -2.795270, 2.682204, -0.315234, -0.290829, 0.048082, -5.181992, 1.282168, -2.762165, 1.426113, 1.707000, -4.629203, -1.381773, 3.483888, 1.231746, 0.959635, 1.024321, 3.132743, 1.440143, -0.121623, -3.894713, 2.382843, 5.631348, -2.709828, -4.703189, 5.098269, 0.695087, 3.782719, 4.675445, 6.022763, -5.893468, -6.612776, -1.342764, -2.906631, -6.828164, 6.216556, -3.838661, -6.620102, 2.033250, 0.574438, 3.062449, 0.195256, 0.312320, -1.113810, 3.507383, 1.441819, 0.874012, 2.437464, 1.133729, 1.368206, 10.217820, 2.241683, -0.429320, -3.894818, 1.621964, 5.148217, -0.764651, -4.459765, 2.406326, 
1.989000, 3.864517, -1.368541, -3.559168, -3.897746, -20.080606, 4.123347, -5.228001, -3.299122, 0.962435, 0.522015, -0.402336, -3.387426, -5.981354, -1.381018, -1.483798, -1.830560, 3.258768, 4.992769, 47.932034, 1.344948, -4.236881, 1.773366, 0.659067, -2.210075, 5.002216, -4.865983, -1.548675, 1.807968, -5.603806, -0.190212, -2.337569, -3.089808, -0.862871, -8.700631, 5.457571, 3.984666, 0.658955, -2.196346, -4.292675, -0.780816, 0.769220, 2.174956, 2.660364, -1.906250, -4.770666, -6.087595, 0.698831, 0.543975, 1.314846, 2.678899, -2.456889, -1.412340, 1.406363, 5.925550, 1.901989, 2.444864, 2.600682, -0.024516, 3.930375, 3.464553, 3.747995, -0.250319, 1.857823, 4.730449, 1.230439, -2.207210, 0.910806, -0.377090, -2.856334, -1.995489, 0.876158, 2.491508, -2.200970, 12.557244, -0.037571, 0.546623, -2.080354, -2.482499, 0.321669, -0.783329, -1.755629, -7.893689, -23.774939, -7.158698, -1.875171, -1.281272, -4.477736, 3.325664, 6.510106, -3.699291, -1.884934, -5.767200, -6.147463, 5.894515, 1.016173, -0.194931, -4.244712, -4.448786, -0.129400, -3.077547, -0.638804, 5.994468, -3.066692, 6.657439, -0.409814, 1.328925, -4.061563, 2.119708, 4.979975, -4.113218, -1.034720, -1.739756, -9.974875, 0.131563, 3.146017, -5.895171, -4.860582, -0.998382, -1.265812, -1.364393, -2.176633, -1.461539, 0.422791, -2.576539, 2.842657, -7.755994, -7.623969, 3.702054, -1.580819, 2.154251, -3.150540, 1.246960, -5.462420, -5.664275, 3.125876, -1.564969, 1.019071, -9.127211, 2.841057, 0.758049, 5.360108, -3.659481, 0.138878, -0.438434, -0.055836, -0.870024, 0.371788, -5.101191, 0.136766, -4.824679, 1.838409, -0.154343, 2.864966, -3.327515, 1.578334, 4.174641, 0.877606, 2.169600, 7.554946, -0.283193, 6.812216, 2.055799, -0.296956, 0.311624, 2.173999, 0.049316, -1.044881, -2.964058, -4.674428, -3.593721, 4.188018, -2.316576, 3.803720, -0.253186, -4.358509, -2.351217, -0.170277, -28.426014, -3.636131, -0.418576, 6.431114, -1.643137, -2.162497, 2.246531, 4.474029, 0.840325, -1.671944, -2.560735, 3.340313, -0.918728, 7.462214, 1.220675, 1.289997, -2.709882, 3.065038, -0.098697, 2.217031, 1.754498, 1.388865, -9.350600, -0.669478, -3.919390, 1.287890, 0.006270, -0.208305, 0.238537, -3.044888, -0.904316, 1.619854, -2.376778, -1.632882, 11.180886, -3.706999, -0.948795, -0.602214, 5.489887, 9.740318, 3.845946, -0.599598, -0.256252, 2.331402, -3.466881, 1.323254, -1.516204, 0.236590, -5.375591, 1.427258, 6.110905, 4.510322, 2.177007, 2.412064, 1.938499, -2.666273, 12.803903, -1.832978, -1.936508, 0.997035, -0.432327, 0.264868, 1.924250, 4.459678, 0.553410, 2.775764, -1.430104, 1.464519, 0.427380, -7.424240, 0.978083, -2.451670, -1.070307, 1.404176, 3.224234, 2.501353, 1.000265, 3.049890, -6.290756, -6.703895, -5.700268, -4.351070, 4.281235, -6.503413, -10.641171, 1.250353, -6.935366, 1.247792, -2.876938, -2.816121, 4.265053, -0.092179, 1.189310, 5.196477, -8.313596, -11.407356, -0.090954, 3.018408, 2.669824, -0.911198, 3.208556, -1.206236, 1.793371, 4.439926, -3.105369, -1.270381, -3.500592, -0.991417, -0.935610, -17.860001, -0.856600, -3.319674, -0.911797, 3.161229, -4.300007, 1.838384, 4.364628, 0.703687, -0.223441, -7.835571, 6.474916, -1.409915, 1.796286, 1.480622, 0.282803, -2.237307, 5.114225, -3.169454, 3.048834, -5.527786, 0.004587, 0.949397, -19.889380, 1.039926, -2.144435, 0.130052, -8.224975, -1.026113, 0.951771, -1.719624, -2.794927, 0.382448, 0.480869, 0.642630, -0.323551, -1.853559, 1.146316, -6.034152, -0.389631, -1.857311, -3.772340, 2.750707, -5.963089, -2.533455, -5.624331, -3.390948, 5.100173, -1.154467, 
6.165472, 1.594810, 3.966839, 2.707621, -2.439730, 0.798211, 2.823258, -5.497983, 1.359825, 1.167054, -5.620857, -0.495827, -0.529598, -1.180323, -1.876745, 0.012315, 3.032026, -10.296271, -2.670000, -3.453792, 3.848038, 2.333091, 10.287111, 2.165104, 0.710347, -2.298230, -1.751988, -4.070202, -4.802220, 7.371030, -5.318613, 2.158966, 0.621032, -4.925990, -0.846735, -2.236966, 0.696813, -0.762223, -5.564859, -3.741148, -8.041711, -8.016417, -1.817770, -0.752128, -1.749938, -6.524678, 2.563604, -3.125811, -2.255188, -12.779183, -4.288491, 4.957750, -0.919120, -1.679587, 6.629279, -1.700079, -5.175654, 3.858805, 0.408368, 1.267684, 3.302717, 0.151141, 2.592164, -0.777856, 3.897479, -1.764927, 0.361837, -0.688383, 1.690966, -0.854830, -3.935272, 2.206409, 2.084598, -0.795355, -2.733134, 3.446365, -2.276695, 6.318832, -2.123587, 2.378304, 0.480538, -0.419980, -0.573010, 3.604203, 3.043358, 4.465629, -1.576976, 8.403450, 2.318177, -2.657940, 1.897705, 2.157284, -0.176972, 2.929054, 5.861001, -3.396395, -7.986055, 1.982398, -0.934847, -0.196971, -4.529693, -2.205753, 1.787073, 4.705458, 0.153825, 3.711550, -1.090535, -0.222807, 6.108150, 0.625758, 4.039876, 2.148496, -4.007553, -2.591868, 5.058038, -3.280740, 1.438874, -7.098381, 0.551312, -0.019837, -3.303874, -4.428917, 2.108920, -7.297920, -2.603361, -6.212405, -2.046264, -5.194932, 0.144148, -1.774858, 0.293199, 0.442523, 0.917678, -0.850350, 0.197423, -4.443827, 1.970809, -4.512796, -1.580566, -0.339683, -2.141265, -5.313002, 3.081825, 3.989455, 1.402336, 3.047523, 0.389616, 3.277324, 4.216976, 2.475713, -8.278424, -2.683667, 5.051536, -0.983773, 2.171453, -1.534576, -6.967187, -0.803524, -0.016174, 0.548695, 3.565756, 5.058985, 0.803081, 5.839240, 2.784815, 0.074305, -0.199195, -9.937843, 4.329383, -0.539009, 4.629091, -0.802863, 0.894037, 4.710128, 3.481739, -5.690711, 0.423449, 1.187420, -0.201800, 2.702831, 0.860518, -0.383126, 0.159294, 2.860673, 3.332905, -1.141616, 4.104559, 2.994694, 0.478522, -1.463033, 2.509958, -1.936122, -4.324917, -0.348549, -3.917790, 2.476979, 2.343854, -0.956187, 0.674467, 0.785163, 1.978697, -4.923691, -2.913966, 7.301755, -0.151009, 6.468770, -1.793773, -8.736486, 0.767645, 5.876063, 0.979330, -2.524950, 4.293113, -2.878569, -0.043513, 0.668262, -0.138689, -2.190439, 1.914594, 1.057041, -0.510029, 0.777235, -1.629387, -1.275562, 0.588315, -1.272347, -8.000982, -0.025677, 0.897335, 3.553093, -4.675339, 2.694863, -1.545545, -2.649874, -3.690698, 3.610578, 5.482494, 3.182851, 0.914212, 3.898405, -2.169665, -2.982248, 2.473002, -10.818861, 2.550534, 1.030614, -0.145618, 4.118483, 0.926881, 1.610949, 1.080264, -1.195810, -3.454058, -4.189197, -0.832136, -0.792559, 3.317927, 3.263461, 2.082658, -6.270870, 6.739213, -4.238231, -4.717479, 2.588153, -8.810438, -1.245415, -1.513222, 4.949384, -6.465408, 0.011828, -1.437624, 0.130517, -0.796087, -3.359360, 5.944135, 0.506007, 4.615585, 2.563034, 6.552821, -6.680770, -3.411387, 2.863103, 1.250932, -4.190070, 1.327298, 1.261376, 2.966345, -1.287480, -5.242377, 0.985963, -1.411199, 0.133332, 1.792124, -0.832668, 1.154158, 0.964184, 3.706770, -0.000854, 2.903304, -0.895213, 4.000095, -3.686526, 3.213437, -4.706445, -1.308617, 4.113848, -2.737977, 1.562001, 5.607126, 0.960042, 1.433525, 6.983365, 0.274547, 1.965000, 9.138268, -0.436433, -0.454524, -4.457383, -2.280589, 4.140254, -11.978215, 0.293515, 2.794824, 4.216743, 2.579867, -2.556382, -2.020272, 5.623112, -5.271835, 0.387230, 0.836283, -1.495048, 2.541495, 2.930545, 2.098567, 5.748567, -4.172064, -5.735911, 
-2.475516, 1.435537, 2.127018, -0.711296, -7.948764, 3.063461, -0.742258, -2.206010, 1.454916, 1.427907, -2.728484, -2.138403, 2.024133, -1.875692, -1.830184, -0.847052, 0.247767, 1.120336, -0.611148, -1.962781, 1.963801, -3.417486, -1.224645, -3.051069, -6.072777, 0.071024, -1.812450, 0.394084, -7.601513, -0.706624, -1.862799, -4.596753, 4.606680, 3.831007, -3.075969, 0.801902, -4.553856, -1.530756, 11.482397, -1.116637, -0.049839, -5.557608, 0.594015, -0.020668, -0.304966, 3.494741, -0.438330, -0.335975, -3.798270, 2.064097, -2.830903, -0.180927, 1.468278, -0.047076, -2.719292, -8.892654, 3.792854, 8.481908, 0.143010, -7.642408, -0.365793, 1.744668, -11.256462, -4.066974, -0.342073, -2.609741, -0.020335, 5.202221, -10.268240, -2.336538, -3.756663, -5.137491, -3.132803, -1.599756, 0.459914, -5.824290, 6.358953, 6.547798, -2.606397, -0.500944, 5.990815, -4.464989, -5.334608, 3.055580, -2.113880, -4.319254, 0.109849, -1.435706, 1.236413, -0.659347, 4.209237, 27.808695, -0.735722, 2.542649, -2.888971, -2.677156, -2.106372, -0.176389, 3.084910, -6.620935, -4.989196, -4.167188, -0.450144, 5.561008, 3.147554, 0.646565, -1.812273, 0.136941, -1.150602, 3.840627, 2.770045, 2.407038, -1.792738, -4.075364, 2.567823, -0.405188, 1.471137, 2.095017, 1.875115, 0.781932, 0.154044, -1.027020, 2.534902, 2.466861, 5.860183, -1.624078, -4.906713, -0.614137, -1.109079, -30.210474, 2.965029, -5.656622, -1.602059, 2.151850, -2.424324, -1.133597, 3.602876, -1.374817, 1.312690, 6.600981, -3.711915, -0.776028, -0.783309, -0.032203, 0.004787, -0.573705, 1.131783, -2.909537, -1.993327, 3.583283, -4.179435, 5.197440, 3.056979, -6.708999, -1.780574, -1.397572, 3.321905, -3.176260, 1.995708, -1.142323, -1.124792, 0.443718, 0.178960, 1.654835, 4.147129, 1.667449, -2.683200, 5.148854, -3.127443, -0.269485, 0.740664, -1.416041, 3.067853, 0.884041, -1.115872, -1.382064, 1.150366, -7.391175, 3.878388, 1.055246, 5.294684, 3.367005, 1.458345, 0.444658, 2.003294, 2.478994, -0.889095, -0.195013, 4.370057, -2.763848, -1.210211, 2.195881, -1.175749, -3.720346, 7.946991, 1.573793, 0.570961, 1.999400, 2.635027, 0.585716, -4.593002, -2.413884, -0.749115, 2.901417, 5.344875, -6.515359, -0.041352, -5.578712, -0.287062, 4.287720, -4.150388, 0.676597, -4.173187, 4.304606, -1.793098, 2.079561, 0.289218, -0.746309, 3.382319, -0.381789, 4.965404, 2.722382, 2.368737, 7.892712, -1.789875, -1.298005, 1.929076, 2.930258, 1.172013, -1.393607, 1.978461, 0.344213, 1.240076, -1.066240, 3.754827, -3.045815, -1.633300, -7.885127, 1.558708, 0.268276, -0.346626, 4.239035, 4.048144, 4.338556, -1.356537, -3.766309, -3.576638, -3.384286, 1.989253, -0.840723, 7.379016, 4.081261, 0.038133, -0.412059, 1.548459, 0.050730, 10.584702, 2.185298, 6.399117, -0.611670, -2.343023, 1.482179, 3.325804, -1.125875, 6.449540, -1.822189, 3.688849, 3.485385, 7.524541, -1.358806, -0.681207, -0.228230, -1.807666, 0.689905, -4.343250, -1.711935, 6.821198, 0.065716, -0.855093, -4.671536, 4.853776, -1.215308, -3.666353, 2.930653, 1.096143, 0.179448, -5.428270, 5.465977, -1.348175, 1.068519, 5.216844, -2.741594, -1.862931, -1.070844, 1.933743, -0.791765, 3.389639, -7.906260, 0.136756, -5.475089, 6.345721, 1.918432, -2.397303, -2.369119, -0.944957, 3.099753, -7.967153, 0.263381, -1.116860, -3.724980, 6.644803, 6.030805, 3.247872, -0.263794, -4.602955, -2.965417, -1.337570, 3.773480, 0.421972, 5.234677, 3.752219, 7.417940, 3.028159, 9.032824, 1.142330, 0.371421, 1.237620, -2.001766, 4.180665, -4.384745, -7.251889, -0.973868, 2.148206, 2.667418, 2.097704, 5.528183, 0.712180, 
0.229946, -4.225259, 3.718240, 0.981887, -2.992356, 3.732011, 1.262913, 2.486276, -0.845420, 4.692969, -3.100411, -11.288132, 3.064300, 1.208451, -2.750242, 1.574212, 3.747189, 3.041973, 11.495134, 0.422142, -2.928797, 1.352937, 3.245569, 1.863270, -0.466618, -3.322316, -4.923002, 0.221532, 0.045353, 8.562343, -3.430971, -1.047455, -2.337644, 1.798944, -3.934470, 8.707403, 2.670470, -4.344477, 3.374244, 1.475771, -3.932935, 3.965106, -3.995053, 4.167596, -2.276040, -0.591933, 0.932282, -1.462290, 2.448748, 5.785700, 0.220345, 0.982267, -0.730691, 1.598619, -0.951081, -1.381845, 6.758685, 0.056153, 1.316145, 1.522889, 2.444357, 2.326614, 0.808057, -2.346661, -7.242181, -2.112781, -3.223854, -4.097082, -3.153551, 3.551127, -0.689712, 2.171698, 2.622624, -1.431949, 4.260911, -1.099790, -2.948601, 1.754508, -4.628411, 2.743903, 2.132607, -16.845716, -1.369676, 5.490612, 1.289092, 1.616659, -2.485617, -3.701179, -1.843138, -0.944958, -1.893737, 13.684652, 0.805028, -2.949577, 2.342299, 4.246860, -3.759459, -1.838323, 3.088591, 3.822837, 0.593933, -5.586031, 0.837919, 9.085196, 5.487856, 0.007424, 3.114270, 2.298352, 1.340533, 1.124153, -0.341179, 0.106273, 4.979677, -4.125438, -0.993304, -3.155438, 2.359821, 1.202263, 1.514211, -3.757470, 0.575695, 4.774167, -16.632801, 1.342419, 4.075314, -2.214773, 2.418203, 4.071110, 2.334437, -1.676125, 8.703053, -3.765240, -4.209192, 5.580455, -3.874269, 2.419491, 2.460470, 1.049202, 1.818905, -1.270050, -2.687582, -2.777706, 1.905133, 0.056396, -1.378634, 2.904002, -1.947071, -0.834215, -1.901032, 6.773449, -1.063208, 7.024649, 1.117981, -0.367361, -1.740446, 0.913254, -3.286130, -1.201091, -1.921104, -0.877035, 1.950935, 1.215519, -1.095694, 0.836712, 0.409856, -0.226469, 2.087629, -1.558773, 1.495108, 3.269367, 3.330146, 0.768648, 3.438131, -4.277113, 0.945814, 5.588975, -6.241295, -3.621197, -0.107132, 2.572536, 3.045289, 7.745201, 5.807571, -1.651044, -0.747606, -0.376589, -1.412276, -3.816433, -1.707250, -3.178647, -1.054148, -3.442186, -1.908579, 0.998634, 5.953327, -0.953441, -2.500016, -0.389423, 2.296786, 2.902000, 2.559777, -4.143809, -0.866035, 0.418335, -1.576818, -1.394512, 2.981925, 1.896816, -2.433038, -1.609794, 0.911857, 2.467139, 1.988222, 1.968848, 1.828560, 3.154080, -1.898295, -2.381668, 0.538234, -2.226501, -3.885483, -0.569491, 3.934917, -6.430278, -3.840080, 4.621070, 0.104495, 0.085714, -3.136611, -0.288041, -1.969913, 0.801661, -0.091117, 1.770883, 7.051067, -3.782157, -3.109600, -3.490250, 4.312650, -0.281791, -0.785728, -2.446904, 1.629344, -8.615851, -5.223053, 7.423417, 0.729529, -6.965634, -3.742949, 1.187336, 2.706056, -5.362820, 0.179687, 0.532849, 4.417111, 3.949409, 0.610486, -0.558789, 0.813596, -1.513216, -3.359447, -0.432475, 2.095533, 0.159239, -0.333785, -2.211792, -0.047680, -4.235281, 1.416200, -6.725245, 2.321989, 2.057713, -0.360040, -4.618135, 8.053253, -0.976249, 2.382700, -4.443527, -4.651454, 2.797418, -3.174040, 2.587232, -2.508294, 1.493909, -2.535459, -0.462006, 6.971141, 2.623497, -1.583797, 1.680959, -0.107594, -2.050336, 0.336694, 1.394572, -1.438382, 0.697090, -7.877173, -2.569103, 0.811040, -0.577080, 3.773080, 5.100289, -1.005922, -4.035639, 5.199240, 0.569351, -2.510661, -0.789682, -2.061459, 0.951691, -3.258404, 0.793776, -0.699546, -3.812907, 4.155831, -0.691320, -3.701868, 5.559592, 6.894836, -2.198950, -2.418447, 0.557332, -3.887216, -0.409616, 0.671317, 2.103461, 0.933136, -2.280445, 4.725842, -3.020412, 0.854076, -2.124374, 2.345908, 0.779380, -2.207812, -0.975366, 1.395538, 5.174310, 
5.707728, -2.258522, 1.107084, -1.178677, 0.965657, -4.074826, -2.210460, 43.621979, 2.087230, -2.555933, 1.138599, -1.867495, -2.005657, 2.006593, 0.903467, 2.500440, -2.682779, 6.033059, -3.445567, 0.342344, -0.145648, -1.879602, -1.052540, -3.399618, 0.614843, -0.783479, 3.685495, -1.959264, -3.259564, -2.239694, 9.379510, 3.394999, -2.434350, -7.337447, 7.274628, -0.171698, -5.943968, -1.474846, 5.444496, 1.095178, -1.766426, -5.691527, -5.506092, -1.270262, 0.313055, 0.721622, 5.188374, 1.902969, 1.205201, -2.170613, 2.897128, -5.421397, -0.611056, 5.514519, -0.520266, -1.261025, 2.060696, -0.969377, 1.961746, -0.733028, 3.820837, -8.300204, 1.480688, -0.752736, -1.417715, -5.781563, 0.288530, -5.425626, -2.840594, 2.333462, 0.768530, 7.000396, 1.959516, -0.236193, 3.105055, -1.981859, 0.149225, -2.736589, -0.641271, 0.997428, 2.710630, -2.386521, 1.275755, -1.025446, 1.094309, -2.452138, -1.884652, -5.242512, 2.956650, -2.594950, 0.358654, -6.993193, 0.821677, -1.042855, 2.405131, -1.163304, 2.789098, 4.814901, -0.783459, -2.994322, 2.965991, 1.153805, 3.003690, 0.741842, -2.705556, -0.624102, 2.207225, 5.139067, 4.516562, 9.528468, 10.722857, -6.014948, -1.468108, -1.515384, 1.200421, -8.417472, -1.479154, -4.700307, -27.693583, 19.278984, 5.617891, 7.485734, -2.536120, 0.713907, -2.703150, 1.188608, 0.960568, -1.372093, -0.877927, 0.092983, -0.725186, -5.076651, -0.030141, -3.103490, -2.449842, -0.183140, 2.404788, 2.396079, 3.170067, 4.591356, 8.881496, -0.642793, -3.631530, -0.544090, 2.360912, -0.504165, -0.405809, -3.042205, 1.706133, 1.847016, 2.333493, 0.757474, -2.129310, 0.796120, 3.211559, -5.078777, -4.502806, -0.481662, -3.884932, 11.689110, 0.064339, 2.081212, -2.165923, 0.583592, 2.662692, 0.726894, -4.564513, 7.464515, 1.626835, -0.850533, 0.909967, -2.349465, -2.608305, -1.992356, 6.136680, -6.013201, 5.306615, -1.351115, -6.117376, 2.164393, -0.883700, -0.201548, 0.697047, -0.577786, -3.289770, 2.176913, -0.115851, -0.672782, 2.007150, 0.863120, -1.828321, 1.254452, -0.156242, -4.639762, -0.776801, 9.015604, 0.961942, -4.866060, -4.236791, 1.183998, -2.264598, 0.007299, -4.196472, 4.446774, 2.381474, -1.194094, 5.165797, 4.621684, 0.407603, -0.077693, 1.476943, -1.108431, -44.966984, -3.110889, -3.256585, -1.450435, -4.775122, -1.218151, 0.868422, 5.199721, -2.379660, 3.885153, 2.122914, -4.509441, -2.694862, 2.642570, -8.154598, 1.392721, -0.582800, 5.900602, 0.151563, -0.997430, 3.602529, 2.306926, 5.268566, 7.866295, -2.492254, 3.914945, -6.835969, 3.688162, 3.279676, 5.574743, -0.151922, -1.066244, 2.785165, 5.100389, -5.511202, -9.268009, -2.915344, -2.801649, 4.430429, -0.735414, -0.181910, 3.236409, 26.079983, -0.761731, 0.653495, -3.007643, -5.631822, 2.289257, -3.494029, -0.192737, -0.513931, -1.746571, 1.399262, 4.482061, 3.125358, 0.241923, 0.804616, -3.433713, -2.345522, -5.599613, -3.405391, -0.259379, 2.662636, 5.827351, 0.032513, 9.715652, -4.794389, 1.433447, -5.983769, 3.320502, 0.101204, -0.489180, -2.867526, -3.387309, -0.163811, 2.514033, -3.596303, -0.992485, 4.348735, 6.369435, -2.752752, -4.915279, 0.872076, 3.233520, 2.918803, -1.060405, 8.784516, -1.047169, -1.258074, 8.113674, 2.097831, -2.211263, -0.356771, -2.771562, 2.111918, 3.434057, -10.373298, 3.225255, -2.546052, 5.178988, -3.712509, 0.466352, 5.595999, -1.902481, 2.197184, 8.664577, 3.278957, 0.887089, -3.924023, -0.009009, 0.493876, -0.462941, -1.171967, 1.827280, 0.585090, -4.594781, 4.302927, 2.931898, -18.763901, 0.288057, -1.013090, 2.084519, -7.164950, 0.544081, 1.258278, 
-3.086405, 0.951399, 4.506777, 1.594531, -0.798212, -2.319204, 1.998325, -1.307129, 0.050375, 2.305047, 3.755739, -0.565489, -0.703969, 4.700220, -0.752597, 0.104114, -3.564498, 0.087325, 4.166714, 3.211349, -5.525641, -2.494095, 4.287768, -3.793372, -2.416002, -3.818194, -6.707589, -5.495049, 0.908813, 6.420833, 1.880672, -4.220618, -1.354073, 0.669962, -1.492136, 1.912206, 6.716006, 1.533039, -2.752949, 2.778269, 1.990574, 1.355981, 1.650745, -2.791111, 2.383833, 3.117180, -1.755023, -2.484791, -1.427666, -2.083348, -0.351782, -0.537250, 2.912282, 4.657736, -0.250751, -3.693070, -2.533830, 0.775283, -1.919510, 1.034829, -2.565387, -0.081199, -0.717182, 3.154967, -0.808507, 3.005673, 1.961030, 0.114304, 9.701428, 3.617954, 1.700600, 0.146973, -0.677872, 3.924407, -0.463155, 1.150887, 2.945447, 2.020185, 3.547239, -2.598321, 0.299821, -1.293790, -4.763089, -3.184764, 0.086383, -1.649983, -0.637856, 7.010010, 2.068723, -3.671678, 0.025033, 2.713188, -0.032102, -5.144317, 1.060371, -2.585963, 1.313534, -1.680610, 1.359738, 0.685065, -4.175454, -9.532748, -6.857881, 3.258746, 3.817967, -0.295498, 2.200130, 0.354658, 4.019381, 4.236646, 6.433453, 0.648714, -0.163894, 4.691657, -8.109050, -2.046292, -4.738183, 2.442103, -5.302021, -1.401157, -3.955161, -4.316630, -1.248632, 9.769939, -1.104696, 2.640380, -3.370728, -2.332601, -3.779285, -3.127338, -2.534174, -3.135659, 2.335110, -1.644436, -0.781360, 4.554105, -0.532770, 4.013941, 4.089730, 1.992979, 0.834781, -1.482325, 0.693270, 2.129113, 4.968291, 1.454291, 4.886137, -0.492988, 1.674870, 1.228140, -1.049799, -3.154610, 0.665178, 0.718149, -1.777335, -2.863971, 1.914558, -1.051247, -7.015462, -4.285183, 2.885106, 1.484717, 2.275805, -7.256853, 7.712085, -0.270096, 0.680072, 2.501477, 5.181439, 1.880382, -4.266192, -0.881489, -3.897562, 2.306980, 1.042805, -1.734362, -3.418523, -2.520693, -0.932934, 0.600804, -4.026608, 2.140552, 0.739133, -1.055242, 10.754318, -1.324455, -9.931609, -6.909337, -0.735538, -7.770214, -5.530866, -5.511956, 0.430703, 1.215536, -0.037053, -2.160068, -7.814392, 7.077866, -1.544036, 1.964725, -5.976818, 3.433081, -1.595420, -0.050951, 1.447312, 2.200961, -1.437491, 8.058798, 3.886115, 1.098245, -5.695917, -3.621947, -0.175427, 1.346340, -2.712342, 0.758565, 2.608576, 4.406272, 2.122162, 0.368007, -3.709307, 3.653518, 0.282671, -3.528298, 1.833689, -2.242323, 2.329140, 3.200709, 0.361647, -9.457127, 0.740096, 0.599537, 0.044123, -4.082075, -1.136967, 2.845206, 9.539391, 4.131850, 9.826702, -1.416007, 4.986438, 1.404218, -0.860064, 1.486623, -6.308336, 6.734470, 2.890507, 13.634391, -1.196998, 4.820277, -0.962307, 8.095446, 4.239813, -4.517473, 5.930497, 3.918408, 6.131054, -2.309074, 1.308818, -5.062254, -0.789280, -0.660698, -2.314359, 2.722275, 0.739409, 2.175684, 3.702787, -1.253475, -2.612329, -3.018609, -4.658152, -6.208450, -3.847937, 3.902117, -4.562454, 1.264555, -1.178211, -2.110918, 6.949625, -2.628705, 0.764311, -4.272890, -3.546896, 5.147094, 3.351293, 0.902968, 2.422658, 0.061787, -5.938614, 5.213331, -5.518444, 1.789501, -3.263019, 1.248217, -0.069851, -3.463836, 10.676457, -0.421348, 4.053063, 3.838572, -0.964464, 0.299519, 4.429276, -3.669645, 8.816913, 2.253774, -2.937450, 3.031363, -0.441367, -0.467278, -2.440284, 0.437956, -2.669316, 0.710339, -1.785235, -3.055763, 4.339170, 2.426236, 0.704910, -10.585463, -3.648029, 3.718344, 7.493590, -0.551805, 3.138997, -3.172817, 4.855944, 7.529528, -0.990349, 0.725348, -3.289269, 1.577000, -5.270145, -2.439614, 25.519426, 0.531986, -2.755472, 2.816146, 
2.306898, 1.787177, -1.969297, -3.771940, -4.072698, -1.891043, -0.889094, -2.567592, -1.333927, 1.758486, 2.561443, 3.690232, 1.029738, 0.161567, 3.351619, -2.916280, 0.093553, -3.761292, 2.797852, 3.100548, -2.871847, 3.449701, -3.052869, -0.397220, -7.578485, 4.339926, -6.886413, 3.227925, 1.463515, 2.742254, -3.532009, 1.450264, 9.489766, -1.036751, 1.478379, 3.071886, 10.161637, -0.079840, -2.863888, -1.750112, 3.457873, 3.459886, 2.706931, 3.321663, -1.270632, -5.748805, -1.165622, -0.063943, -1.805338, 9.535586, -2.524263, 1.099205, 0.840550, 1.529488, -1.175432, -0.004264, -0.098393, 1.532492, -6.314032, -0.103321, -0.071644, -1.736877, 5.621130, -4.624400, -0.792513, 3.783909, -3.232745, -3.458905, 1.588551, -3.174227, -3.180984, -2.934828, 2.062442, 2.622324, 0.271381, 0.591527, -0.733445, -3.398714, 7.095342, -5.738572, 0.362881, 0.205072, -3.085334, -1.162128, 2.990404, 1.570259, -0.308622, 1.584242, -2.891890, -1.574553, -0.806329, -3.489795, 0.363795, 0.451270, -3.467881, -2.665737, -0.829700, -3.262831, 2.531505, -1.448635, -11.764088, -1.527931, 7.177086, 6.047965, 2.480182, -2.124264, -5.441585, 3.992351, 3.107618, -4.064499, 4.343797, 0.738629, -1.994727, -3.738751, 0.183636, -2.504145, 0.066069, 13.811249, -0.786826, -0.128738, 2.956079, 3.258192, 4.277177, 1.896445, 2.342017, 2.951867, 1.038004, -3.546036, -1.966871, -0.350206, -1.062556, -3.251185, -1.717144, -1.744599, -1.011379, -1.543622, -14.860034, 1.587226, 7.304573, -8.171671, -2.431875, 16.159142, 1.283842, 0.719961, -1.129456, 5.108453, -0.519006, -0.805467, -6.985523, 0.856803, -12.640044, 1.878594, 0.034747, 1.530180, -1.908082, 3.374542, 1.808994, -5.683702, -0.402296, 8.026281, 7.437514, -0.215147, -2.404820, -1.402900, -1.002659, 3.748481, -4.154411, -7.063788, -0.248710, -2.645182, 1.603671, -1.907478, 0.039413, 4.430655, 2.428565, 2.067001, 4.126395, -1.570520, -0.347207, 4.200280, 3.175920, -3.679209, 1.716497, -2.141376, 0.908101, -2.319343, -2.115519, 2.724495, 0.881507, -2.219605, 1.227373, -0.623095, -5.680072, -4.515111, -4.280621, 0.638654, 2.864280, -1.788719, 3.186849, 0.808680, -0.938253, -1.928522, -2.190552, 3.152059, -0.731879, 2.886727, 2.629760, -0.052954, 2.126023, 0.956519, -1.393854, 0.348381, 11.050415, -0.480058, 5.922142, 1.182473, 1.413033, -0.197702, 4.039019, -0.146877, 9.944363, 2.529500, 2.303541, -0.278307, 0.550621, 1.555473, -6.140727, -4.189098, 4.228193, 0.920698, -6.579219, 1.470899, -3.844724, 2.822774, -1.295956, -1.055949, 0.610222, 4.895357, -2.428566, 3.989449, -0.780607, 4.248460, -6.552973, -2.184941, -1.064698, 2.079311, -0.336459, 9.744741, 0.184509, 1.335299, -2.066525, -1.792264, -0.921902, -1.379273, -1.854074, -1.279663, -6.862578, -1.625089, -0.318789, -3.344024, -7.887208, 0.634548, -1.720671, 4.806617, 16.197113, -2.255513, -6.082765, -2.146448, 4.435984, -3.256941, 1.366697, 4.858845, -0.249602, -2.045089, -1.747685, 2.477941, -8.705357, -3.707039, 8.628284, -1.643217, 0.556386, -2.368690, 4.593940, -0.541282, 5.100232, 3.154782, -20.099430, -5.723884, 8.678602, -0.261339, -6.657823, 1.785186, 3.918643, 1.058163, -3.577414, -2.781995, 0.277656, -1.206921, -3.431406, -0.371179, 0.062091, -2.803387, -2.210452, -3.624687, -4.868351, -1.711763, 3.888983, 4.381881, 1.861924, 3.465306, 0.802314, -0.696466, 0.556550, 4.886981, -0.335794, -0.826100, -6.531191, 3.034495, -6.029027, -1.254580, -8.929978, 0.385249, 0.460604, -1.932355, -2.915896, -3.936714, 0.725031, -5.863996, 1.867756, -7.317173, -1.208842, 4.265705, 11.616241, -3.310569, 2.551403, -1.606800, 
-0.118172, 2.844477, 5.051156, -1.455490, 5.397809, -0.168303, -6.603184, 3.794992, -17.872410, -0.665075, 2.071692, 3.814082, -0.037632, -2.439398, -0.318887, 3.382610, 0.831689, 9.479648, -5.347363, 1.336866, 4.424902, -1.062037, 4.349950, -1.505998, -0.435761, 2.137821, -2.412280, 1.229777, -1.909602, -3.044448, -3.501979, -0.104246, 2.117775, 3.107763, -0.013128, 3.941957, 4.532784, -0.821736, 0.553934, 0.840146, 1.502584, 4.154314, 0.341067, 4.156440, -2.037416, -4.105920, 1.039137, -1.602859, -5.797848, 4.200612, -2.055606, 2.506652, -2.133896, 3.504039, 0.281614, 0.532739, 1.376643, -0.826743, -5.112361, -1.149567, -1.984722, 3.078268, 9.968194, -2.850210, -3.878800, -0.690165, -0.403210, -7.633935, 1.436931, 3.689845, 2.828761, -5.717082, -2.194066, -2.922075, 3.165498, -11.167392, 1.688192, -5.524090, 0.993366, -1.875577, 2.150835, 11.409101, -6.590968, -5.735646, -2.350946, -6.051579, 0.106657, -4.557941, -0.689102, -1.852935, 1.467756, 2.074039, -1.319408, 1.335555, -2.719368, 2.661831, -2.709215, -0.221642, 0.270116, 1.247337, 0.644675, 3.392402, 3.093399, -4.604951, 8.581511, 3.129878, 1.265846, 1.074514, -13.202533, 4.650165, 2.780230, -2.543931, -5.107362, 0.249189, 2.332117, 0.991129, -9.436378, 1.508680, -5.965739, 2.250675, 0.734286, 6.917059, 5.824757, -1.799859, 6.732748, -2.040524, 0.324983, -4.433781, -4.083701, -2.066100, -0.433014, -0.562280, -5.995659, 2.733306, -0.871792, -3.416987, 3.637951, 1.765604, 6.477674, 2.800692, 5.118347, 4.541334, -2.603925, 1.293608, -1.748612, -5.859107, 2.975294, -4.651271, -1.346864, 2.566788, -5.880526, -0.439273, 8.246215, 1.767323, 1.779419, -1.635352, -1.046876, 3.886189, 0.367515, 1.263599, -2.242850, -0.745562, -0.698594, 0.991128, -3.147751, -1.904813, -2.517425, 0.388064, 2.100670, -0.625413, -2.936193, 7.212622, 3.495085, 0.364892, -7.093049, -0.563480, -5.357414, -6.325642, -0.105547, 3.423231, -0.726191, -1.064512, 0.565287, -3.765018, -1.525965, 0.711336, -6.067619, 0.644387, -1.345242, -1.747090, -9.275399, -2.579576, -3.292126, -2.812819, -1.319786, 5.942580, -8.569520, -0.334647, 2.571727, -0.716851, -0.264076, 0.306208, -2.388574, 1.259972, -3.223534, -1.725168, -5.054450, -6.798425, -2.802125, -9.758092, 3.264822, -4.029863, 1.222392, -6.792177, -5.509841, 3.333195, 1.595992, -2.485954, 3.418053, 3.150728, -5.541345, -1.010569, 1.818943, -6.069419, 0.080386, 6.827480, -0.565225, 0.868441, 0.249676, 0.420381, -1.855428, -2.638492, 0.961045, -5.607455, 3.067363, 4.906213, -0.230991, 5.313849, 0.373842, -2.051629, 5.132754, 1.507394, -0.034538, -1.821275, -5.229528, -1.681695, 0.247036, 4.154735, 1.013717, -2.943813, 1.753060, 0.640790, -5.692806, 4.646099, 0.711496, -6.592886, 0.962532, -1.473296, -1.644924, 7.494703, 2.794217, 2.518434, -5.441831, 1.128450, 1.821951, -3.574837, -2.428346, -8.625550, 7.606143, 1.294082, -7.733930, 2.006055, -1.835669, -0.347749, 0.333535, -2.273628, -4.231286, -6.758154, 2.482530, 3.134415, 7.189015, 3.064994, 1.558125, -0.783350, -2.527781, -1.866463, 4.669818, -1.330083, -1.444759, -0.930375, -0.798592, -3.410083, -0.274624, -2.789313, -1.357198, -2.144307, -4.481643, -2.742354, 0.240901, 1.413023, -5.612537, 6.231907, -4.355688, 1.172392, -2.344915, -6.774537, -2.949419, 6.475248, 1.293685, -0.787925, -0.651137, 4.311115, -3.033743, 4.079912, -0.069083, -4.581884, 1.495375, -1.369064, -3.298662, 4.778340, -6.396335, -0.965669, 2.149769, 1.754473, 0.665721, 1.364326, 0.428260, 0.394718, 2.100190, 3.103011, 2.281635, -3.107227, -2.661521, 5.721916, 12.464497, 2.350211, -1.664418, 
-2.115266, 1.659379, 0.040190, 4.463559, 2.570813, -4.260760, -0.914611, 3.548350, 1.413229, 0.150703, 0.571228, -5.263054, 2.867812, 5.113122, -3.562930, 1.321959, 1.296617, 1.840360, -0.354465, -0.841742, -1.386047, 0.234092, 3.117925, -5.204442, 4.431318, 1.275501, 0.174241, -3.576902, 3.565288, -7.621102, 0.725864, 0.208606, 2.634654, -0.689891, -5.271073, 4.451200, -0.051369, 6.495808, 1.571615, 9.977846, -0.262327, -1.900775, 1.561583, 2.607460, -2.524343, -2.247475, -3.985308, -5.154519, -1.082297, 3.086911, -1.647472, 0.552256, 1.241718, 4.260769, 2.855896, 3.581069, 2.182872, -1.281816, -3.166152, -3.134234, 3.630118, -7.424340, -0.392661, 4.074488, -0.762999, 3.409889, 2.010115, 2.272844, 3.770877, 1.274487, 2.700471, 0.583192, 1.880383, -1.851212, 1.919418, 6.667684, 0.557479, 4.419869, 2.297357, 1.687879, -1.395794, 1.187495, -5.052061, -3.167101, 4.399576, 2.319478, -2.672449, -4.151639, 5.109945, 2.622741, -3.191033, 1.350803, 1.247117, 1.523707, 1.524731, -4.275401, 4.144188, -3.426904, 1.059611, 1.229868, 1.717144, 3.219443, -1.379574, 6.763114, 4.209159, 4.669989, -1.552919, 1.956590, -3.063505, -0.818876, 0.398282, 3.045617, -3.467905, -1.740873, -0.328541, 3.964144, 2.526994, -3.503591, -0.354808, -0.716926, -3.544482, -1.276911, -3.532305, 0.649438, 2.690775, -3.869832, 4.179513, 2.495278, 2.825523, -1.422859, -1.275892, -1.227360, -5.181144, -1.844231, -1.535353, 1.778720, 0.579536, -3.280610, 0.543116, -1.489961, 1.316825, -2.299914, 0.209243, 0.453135, -3.278073, -3.260607, -0.946968, -2.961464, 3.596917, -2.578865, 0.882547, -1.046095, -0.429423, 0.850637, 4.075096, 1.123888, 3.178572, 4.054078, 2.019214, -0.743886, 5.753891, 2.066515, -2.333024, 6.216573, 2.248173, 4.755955, -3.988805, -0.547319, -3.821634, -0.701325, -2.453382, 2.735726, -0.528919, -0.634328, -1.279562, -1.031389, -2.924713, -0.179029, 42.316631, 3.399495, -0.467713, 3.538526, -1.384002, 3.893143, -0.334801, 11.199867, -2.195855, 1.888944, -4.320056, 0.755040, 1.081292, 10.907355, -3.523884, -5.846659, -2.079819, -0.271239, -3.652007, 2.723127, 3.150651, 1.373014, -4.964145, -4.856517, -1.054992, 2.489006, 4.521762, -0.100313, -1.749521, 0.557087, 1.873798, 1.777753, 0.061108, 1.141199, -1.867965, 0.973980, 3.827049, 5.733145, 0.327881, -4.415548, -2.686085, -3.498946, 9.935287, -4.941910, 0.530118, -4.953368, -2.943155, 3.611089, -2.938747, -4.140371, -3.059306, 1.901671, 2.769300, -1.015664, 0.834808, 7.847197, 2.926960, -0.200670, 5.732471, -5.167188, -2.899392, 3.268939, 0.028522, 1.939959, 1.543525, -1.434285, -2.884543, -2.542636, -2.979258, -1.692775, -2.327815, 0.393680, 2.332595, 0.512365, 2.758076, -0.678760, 5.337368, -0.521859, -3.174936, 2.157091, 7.742205, 3.659091, -0.176012, 2.298924, -5.640631, -0.399329, 4.093763, 0.591057, 3.205168, -2.254110, 1.035557, -1.663897, -1.972889, 1.000074, -0.450898, 3.438661, -0.413361, 2.912843, -2.715726, 2.627237, 9.346821, 2.512219, 2.022259, -3.861986, 3.279199, 4.369876, 6.735238, 3.659325, -2.113575, 3.877972, 1.938882, -2.754222, -2.302468, 4.789912, 0.175966, -1.774472, -0.481663, -1.509986, -1.885745, 0.840144, 3.241411, 0.086078, -1.011573, -2.879265, -5.263968, -1.294606, 1.292749, 2.220772, 34.629295, 0.815042, -3.064621, -0.799082, -5.806960, 1.333278, 1.967903, -9.410216, -3.309076, -4.448856, 7.331085, -3.153900, -1.167770, -2.164169, 1.891395, -16.319754, -1.391677, 1.153834, 3.318664, -5.902040, -4.184224, 0.999345, 1.227702, 2.622899, -12.610269, 0.547799, -0.841228, 3.554890, -3.054221, 1.191280, 2.858200, -0.107742, -1.796140, 
-5.167519, -2.952243, -0.991334, -1.160363, -3.391056, -7.776468, 4.159617, -3.535255, -2.583153, -2.086859, -2.986647, -1.117018, -0.348580, -4.668072, -1.983395, -4.534790, -5.013614, -5.024656, -3.032631, 3.231455, 6.135790, 4.552684, 3.503971, -0.114595, -2.233629, -5.283627, 3.570597, -4.535796, 1.975995, -1.817757, -2.049031, 1.648561, 5.248970, 4.141850, -2.243590, -5.773128, -1.633623, -2.526146, -2.853397, -3.678516, 3.093559, 8.550698, 0.936637, 0.071512, -4.433811, 3.393233, 22.987564, -1.477979, 0.435597, 6.149341, 1.090026, 2.535240, -1.342996],
+	"phi3.5:latest":        [-0.673146, 1.773961, 1.383936, 1.196396, 0.803903, 0.041233, -1.880727, 0.656677, 0.414660, 0.618172, -2.137454, 0.863581, -0.927540, -1.059571, 3.178715, -0.531555, 0.528022, 2.207068, -1.345677, -0.733583, -1.960500, 0.252776, 1.050998, 4.098019, 1.398190, 0.633678, -0.285714, 0.178222, -1.715670, -0.595387, 0.283730, -1.291529, -0.118760, 0.233339, 1.325423, 0.156227, 0.816882, -0.922595, 0.362475, -3.944388, 0.326797, -3.158161, -0.732985, 1.371407, -2.308407, -1.135303, 0.659083, -0.910535, 0.900130, 2.807716, 0.168302, -1.908314, -0.816546, -0.732243, 2.254863, 0.547679, 1.224704, 0.182054, -0.245425, -0.152709, 1.269749, -0.729958, -0.324072, -4.141424, 1.497815, -1.658101, -0.067268, 0.341385, -0.418615, 0.548528, 0.885368, 1.378493, 2.714428, -1.364134, 0.758718, -0.285305, 2.441447, 3.092359, -0.394262, -4.053423, -1.546375, 1.188800, -0.722282, -1.030131, 0.868250, -0.742288, -1.632083, 0.561935, -1.452416, 0.438927, -2.190960, 0.433576, -0.217896, 0.200255, -0.936510, -1.232224, -1.273036, -0.485626, -1.580620, -0.755826, -1.929564, 0.465290, 4.440741, 3.027395, 1.076537, -0.644454, 0.186216, -2.798541, 0.465393, -0.019308, -0.310881, -4.531783, 2.101488, -5.035508, 2.224858, -0.011239, 0.521220, 0.857749, -0.375949, -0.357070, -4.447475, 1.367533, -0.899427, -0.499188, 1.973059, 0.953046, 2.269581, 3.454970, -0.048320, 0.186712, 0.723754, -0.597165, -0.704518, -3.137668, -0.235396, 0.612854, -0.658817, 0.153629, -0.792131, 1.470392, 2.165122, 0.188555, 1.302444, -2.575915, 0.124427, 0.352611, 0.320023, 2.944951, 4.222467, 1.244262, 0.798227, 1.428810, 1.982603, -0.927159, 1.607864, 1.340480, 10.285347, 1.991316, 0.211486, -1.872480, -0.404099, -0.676289, -1.028066, -0.997946, 0.940869, 0.110667, 1.143299, 0.608134, -1.320946, 0.917046, 2.197168, 0.313836, 1.368181, 0.940270, 0.310563, -0.476830, -1.447140, -1.780340, -1.682355, 1.462738, 0.196439, 0.911649, -0.420793, 1.497158, -2.186535, -2.329298, 1.269993, -0.463545, 2.139990, -0.418947, -1.203943, 1.084465, 1.353191, 0.058443, -1.170981, 1.961121, 2.214397, -0.376900, 2.705678, 0.812824, 2.097522, 0.286724, -1.450737, 0.129898, -2.488995, -0.329558, -0.128161, 0.197423, 1.306825, 0.113337, 1.212518, 0.731604, 0.703685, -0.389565, -0.476361, 1.828673, -0.169361, -2.419929, -1.995867, 2.348401, 0.507247, 0.869877, -2.789433, -2.902169, -0.972280, -7.036316, -1.327737, -1.495203, -3.542428, 2.349562, -0.176259, 0.577736, -0.263452, -1.140350, 1.543239, -0.912279, 0.656487, 0.746208, -2.934609, 0.181207, -1.764477, -0.055239, 0.358976, 0.867107, 1.032517, -1.327570, 0.088860, -2.025778, 0.691003, 0.170094, 2.655743, 0.918418, 2.664694, -2.300812, -3.386097, 0.444859, 1.506870, -0.811175, 1.237885, -1.537902, 0.596099, 0.232010, -1.399363, -0.405298, -1.654561, -1.731333, -3.663979, -0.426686, -1.460794, 2.440381, -1.661958, 0.633296, -0.810366, 6.727916, -0.109063, -1.092131, -0.154039, -1.273530, 1.229352, -0.608609, -0.921947, -2.829079, 0.318393, -3.228506, -3.602714, 0.889819, 6.571033, 0.005032, 1.567258, -1.415679, -0.924295, -1.995832, -0.434035, 2.503207, 0.473745, -0.454890, 0.762829, 0.725630, -0.907874, -0.634107, -0.395222, -0.725407, 2.193555, -1.178942, -2.723499, 1.179755, 1.163202, 2.051129, -4.934263, 1.330124, 0.219635, 0.998203, -0.199323, -1.976434, -1.401147, -0.783115, 1.219287, -1.736259, 1.308689, -2.426950, -1.290456, 0.375212, 0.023432, -2.588938, -0.825791, -2.872364, -2.859827, 1.172949, -1.544376, -0.389452, -1.740335, -0.849052, -1.976815, 1.528051, 0.306343, -0.553086, 
0.849244, -1.591414, 3.744164, -0.516943, -0.664582, 2.461721, 0.289016, -0.631565, -0.815701, -1.867728, 0.351050, -3.697519, -0.589073, 1.586753, -0.961073, 1.888311, 0.191198, 1.605824, 0.601401, 2.723006, 0.248367, 0.551193, 0.704792, -0.607476, -1.915296, -1.144370, 1.356979, -0.132788, 0.985682, 0.851733, -0.764221, 1.425332, -2.582093, 0.091918, 1.125133, 1.364352, -1.795812, 1.113049, -1.442453, -0.749418, 0.773318, -0.673764, 0.868663, -0.041924, -1.270996, -0.574109, 1.020069, 1.463071, 2.494691, -0.687432, -0.729114, 2.130355, -2.116971, 0.416911, 0.682230, 1.381197, -2.189674, -1.193410, 0.987836, 0.039156, 1.395965, 1.923637, 1.515283, -0.361489, 0.086324, 0.531358, -0.957165, 0.115298, 0.881636, -2.629735, -0.798789, 1.396895, 0.901492, 0.593501, -2.432323, -1.074346, 0.849968, 1.087177, 0.719528, -0.540680, 1.256600, -2.109005, 1.204493, -0.323969, 0.429343, 1.169857, 0.192498, -3.646500, 0.806962, 0.224534, -0.359960, -0.820515, 0.044796, 0.305256, -0.830295, -1.821882, 1.057876, 0.816326, -2.350938, -1.109166, 1.192291, 0.934139, 1.383155, 0.601578, -0.330424, -0.624600, -0.815868, 1.945924, 2.251176, 1.019237, 1.710544, 1.166476, -1.807430, 0.627442, 0.986137, -0.855849, 0.754001, -1.267901, -1.372234, -1.402915, -0.764807, -0.212386, -0.746779, 0.169666, -1.000273, 0.405105, 0.948420, -0.932038, -0.532197, 1.825088, 1.972480, -0.450919, 1.831221, -1.111704, -0.718301, 1.285014, 1.290329, -0.265116, 0.555321, -1.808289, 1.419344, 0.491138, -0.228002, 0.717023, 0.944590, -2.756543, -0.983593, 0.498523, 1.766066, -2.586064, -2.043901, 1.873527, -1.771113, -2.022461, 1.296479, -2.405484, -0.719389, 0.935015, 0.554301, -2.223877, 0.683203, 0.517766, 1.028774, -0.007526, -4.714740, 4.243845, -0.631999, -1.488392, 1.275212, 0.300827, -0.580512, 1.733458, 2.243669, 3.152543, 0.574098, -1.437298, -0.612046, -0.817055, 2.047636, -1.544132, -1.150256, 4.024682, -1.145868, -0.321381, 0.036888, 0.117946, 0.528881, -2.264805, -2.046049, 2.018342, 0.162491, 0.658578, -0.571300, 2.402366, 1.387818, -1.144415, 0.706773, -0.775977, -3.139971, 3.118639, 0.604142, -2.242523, 0.173237, -0.808030, 1.549709, -0.290061, -1.604086, 1.832886, -3.782074, -1.089165, 1.034331, -0.111195, 0.266396, -1.479985, 0.567228, 2.140018, 0.262131, -2.529051, 2.798075, 2.819704, 1.171151, -3.096925, -0.955713, -0.615862, -3.596173, 0.316286, -1.660588, 0.269885, -0.968338, -1.842830, -2.269946, 2.878476, -1.733101, -2.929590, -0.480505, -1.059464, 1.740165, -0.007570, 0.052200, 1.747636, 3.439924, 0.101046, -1.268906, 0.335412, 2.007034, -1.025929, 1.646694, 1.435544, -0.341828, -0.516777, 0.510846, 0.747431, 1.469532, -1.372038, -1.390458, -0.740782, 0.500687, 0.698110, -0.137589, 1.720983, -1.059209, 0.793849, -0.279748, 4.670698, 0.981363, -0.424805, 0.572715, 2.388191, -2.164352, 5.346311, -1.883411, -0.373628, -0.160289, -0.551086, 0.412956, -1.407958, 0.325130, 1.374554, -1.925456, 0.011369, 1.184993, -0.696698, -0.449753, -3.006976, -0.803612, 3.985201, 0.628395, -0.582267, -1.508372, -1.300323, 0.458271, 7.469186, -0.976351, -0.812085, 0.515193, -1.257922, -1.129265, -0.001616, 2.649155, 0.495838, -1.336125, 1.529925, -0.779347, -1.866342, 0.133681, 2.312066, 1.660315, -0.141330, -0.904580, 3.581172, 0.019073, -1.372449, 0.468618, 2.462330, -0.930026, -0.933784, -1.977075, 1.956501, 0.268021, 1.684524, -0.140436, -2.693254, -1.268742, 0.449132, -0.547531, 1.748071, 3.262753, -1.866733, -0.426130, -2.178544, -0.165114, 0.737579, -0.033724, 0.131663, -5.324664, -0.246939, 1.676050, 1.039152, -0.701957, 
-0.344552, -0.229530, -0.771047, 0.692829, -1.126916, 0.122709, -0.501818, -0.148112, -3.739330, 0.979546, -1.364197, -2.134974, -1.083797, 1.264926, 0.568090, -0.310036, 1.902893, -2.458756, 1.602398, -2.160991, -0.807726, -2.985387, -1.801137, 0.823810, 3.340411, 2.132172, -2.672569, 0.762695, -0.340871, 1.422122, -0.492634, 2.051865, 1.743893, -0.354465, 4.794233, -0.827860, 0.723966, 2.358460, 0.190277, -1.688527, 3.538512, 0.775830, 1.449130, 2.882816, -0.861145, 0.550950, 0.631133, 0.811810, -0.080506, -0.893572, 3.127145, 0.246261, -0.966694, 0.611752, -1.651355, 0.501113, 1.863260, 0.130295, -0.385667, 0.730604, 0.506296, 0.603679, 1.655040, 0.857641, 1.953502, -0.274846, -1.065344, -0.818920, -0.053031, -0.216042, 1.113688, 1.708415, 0.503735, -0.439812, 0.085326, 1.974412, -1.129410, -2.263748, 0.180430, -1.006026, -0.390191, -0.671141, -0.723127, 1.485837, -2.730896, -2.274291, 2.390012, -0.048095, -1.352887, 0.348627, 0.272339, 1.243004, 0.675311, 0.304342, 1.022612, -0.512891, 1.432814, 0.774289, 1.209724, 0.321353, -1.716853, 2.470210, 0.661494, 1.298754, 2.647157, 2.167352, -1.588103, 2.885778, -1.514561, 2.003929, -0.151694, 3.121073, -0.502605, 0.709648, -0.646359, -0.212184, -1.120872, 1.901744, -1.166306, -2.297191, 0.395841, 0.345555, -0.641248, -1.336668, 0.896454, -0.320724, 3.788651, 1.582513, -1.623298, 1.267899, -1.840239, 1.309733, 1.329613, 2.815268, 1.619774, 0.785730, -1.015563, -2.362249, 0.182186, -2.887139, 1.076801, 1.295002, -3.118979, 1.505913, -0.621413, 0.563019, -0.000360, 0.054379, 1.742873, -1.802679, -2.307700, -2.080847, -0.597570, 2.063887, -0.632688, 3.135220, -0.383116, 2.324251, 0.249234, 1.425846, -0.505650, -1.519348, -2.283833, 2.136272, 1.202572, -0.502516, 1.122624, 0.092540, 0.521259, -2.175407, -0.831501, -0.404301, 0.131983, 0.098812, 0.329781, 0.751166, 1.401148, 0.207615, -1.293648, -0.520432, -0.084056, -0.620572, -2.362188, 1.054522, 1.251415, 0.353496, 0.717204, -1.459248, 1.449751, 1.730279, 0.820404, -0.755251, 0.366820, -1.724594, 1.639398, 0.582624, 0.638566, -0.701108, -2.931173, 0.635212, -1.847404, -0.623347, 1.343464, 0.879213, -0.189924, 0.366114, -1.877230, 2.091859, -0.710351, -1.330898, -0.023267, -1.433759, 1.032883, -0.164378, -1.544834, -0.802009, -0.147338, -3.158469, 0.702302, -1.655990, -0.620430, 3.692657, -0.729728, 0.268183, 0.408644, 0.532836, -0.401248, 0.041959, 0.258363, 1.707924, 0.522655, 0.945591, 0.364155, -0.041151, 0.391319, 0.646094, 1.533962, 4.343158, 0.636135, 1.588725, -0.200152, 1.370110, 0.152343, 1.668502, 3.353289, -2.472697, 0.776751, 0.912892, 1.100329, -0.397742, -1.205941, 0.399664, -1.637245, -2.003112, 1.309702, 0.646123, 1.371912, 0.196675, 2.367531, 0.267502, -1.449905, 1.928928, -0.742447, -0.245650, -1.716811, 0.945224, -0.592774, 1.890682, -0.365412, -1.183000, -3.070894, 1.242379, 0.593016, 0.231816, 0.090744, 2.483765, -0.729865, 0.854456, 0.826709, 3.415189, 2.207824, -0.404572, 0.994595, -1.309107, -0.268799, -0.946287, -2.263054, 1.194651, -1.419608, -0.663798, -0.528309, 3.908534, -5.938332, 0.635583, -0.569915, 1.220457, -0.294586, 2.837642, 0.800591, -0.818307, -0.096893, -2.235005, -1.563161, 0.627740, 2.194680, 1.563681, -0.333328, -2.443478, -2.264539, 1.205792, 1.025922, 1.831161, -0.623546, 2.223986, -0.567398, -1.341268, 2.216712, 1.080471, -0.766948, 0.774700, 1.048705, -1.248191, 0.443985, 0.673279, 3.370724, 1.808142, -1.560064, 1.584379, 0.613911, -2.831164, -1.145923, -1.977830, 1.845394, 1.417885, 0.409615, -1.018333, 1.978347, 1.404990, 1.162900, 0.943632, 
1.678036, -1.569082, 0.714683, 0.069758, -1.084424, 1.424060, -0.543550, 1.560557, -0.424512, -0.180426, 0.735663, 2.111835, -0.617141, -0.160254, -3.093450, 1.672516, 1.184744, -1.509728, -2.515543, 0.257744, -0.039987, -1.869883, -1.737732, -2.736469, 2.569211, 2.880934, -2.569121, 1.472750, 1.251466, 1.260776, 1.332785, 0.671242, -3.314538, -2.198795, -1.115016, -0.445950, -0.688170, 0.056947, 0.457271, 0.009432, -1.102267, -0.178054, -1.955634, -0.674558, -0.159688, -2.308983, -0.126551, -0.746742, 0.724022, 0.137716, 1.714678, -0.286383, 0.274415, -0.024592, -2.046172, -0.637060, 0.202975, 0.618997, -0.959988, -2.557364, -0.879654, -1.198814, -2.077335, 1.150322, -2.486706, 1.593242, 0.045499, -2.291189, 1.116508, 0.482507, -2.431943, -3.097333, -0.351557, -1.475482, 0.046465, -1.999517, 0.230853, -4.928410, 0.100102, 1.390009, -0.345562, 0.580213, -1.496691, 3.115069, 0.501402, -1.718943, -2.424936, 3.898009, -0.608152, 0.160677, 0.055421, -0.031607, 0.250230, -0.149788, 1.348658, -2.675330, 2.494114, 0.576467, 1.210514, 1.312075, 2.822564, 0.293929, 0.318963, -3.160077, 1.085700, -3.362732, -1.125940, -1.586181, 0.529336, -0.312040, 0.381185, 0.886623, 0.660722, 0.743214, -0.958793, -0.475246, -1.550586, 1.177952, -2.123308, 0.959841, -1.628015, 0.929789, -0.157312, 3.011603, 0.114650, -0.827822, 0.430165, -1.700076, -0.950380, 0.303212, -0.836592, 1.126112, -0.045868, -1.803094, 0.516004, -0.222089, 1.502576, 0.940191, 1.435250, -1.816076, 2.341066, -2.904779, -1.301980, -1.344776, -0.181449, -1.439783, -3.458403, -0.593239, -0.110873, 3.232347, 0.489341, -3.170200, 0.211684, -0.921975, -0.940839, -0.883399, -2.655984, -2.561061, -1.166499, 0.727030, -0.352341, -0.780770, 1.424971, -1.228519, -2.358953, -1.038689, -2.231122, -0.015513, -1.084853, 1.439958, 1.159530, -0.217411, -0.560229, -1.697035, 0.034073, -2.068393, 3.257823, -4.360054, -4.215295, 1.881029, -1.414001, -1.374195, -3.668932, -0.560354, 0.824165, -2.859716, -0.777071, 1.773608, 0.096602, 1.271389, -0.588287, 4.355747, -0.727979, -0.227092, 0.512032, 1.159031, -0.681646, -0.352979, -2.502501, -1.304312, -1.753210, 1.070594, 4.093348, -3.625806, -0.890541, -0.195012, 0.666276, -0.572682, 0.427824, -0.038257, 2.200452, -0.393409, 1.126617, -0.532790, -0.532255, -1.493815, 0.595699, -1.447249, -2.675511, -0.290311, 2.711103, -1.072857, -0.172172, 0.407848, -2.808752, 1.145212, -0.544969, -0.996059, 1.018824, -0.555070, 0.211478, -0.004442, 0.582596, -0.668361, 1.401366, -0.098813, 3.246641, -3.185895, 2.080038, 0.160968, 1.838961, -0.023389, -3.347831, 5.887652, 1.300398, -1.662176, 1.223447, -4.021576, 1.292036, -1.810053, -1.080596, 0.835527, -0.410658, -0.298249, -0.457787, -1.545626, 1.795876, -0.984113, -1.766650, -0.375398, -2.392029, -1.644748, -0.785998, -5.763386, -3.293936, -0.051615, -1.128634, -0.818426, -1.000686, 0.841410, -3.037892, 1.423512, -0.893654, 3.617955, -1.990744, 1.215377, -5.811119, 0.485499, -0.580929, -0.537345, 1.424533, 2.079854, 0.141992, 0.650065, -1.246158, -1.742799, 1.742426, -0.192220, -0.292407, -0.543976, 0.551129, -1.653536, 1.970969, -1.764221, 1.518165, -7.917214, 1.010946, 1.332931, 0.507154, 1.433316, 1.781129, -0.894100, 3.383435, -1.889388, 0.407124, 0.710977, 0.418546, -2.339482, 0.741511, 3.067454, 0.449851, -1.441965, 1.919511, -0.342265, -0.799464, 0.322938, 1.697500, 1.019305, -3.385572, 0.386305, -0.253275, -0.863457, -1.660045, -0.660654, 0.743807, 0.129033, -0.597894, 0.481607, -1.555833, 0.838941, -1.555304, 0.747505, 0.440190, 0.752784, -1.506714, -1.169028, 
3.117794, -3.908451, 0.765022, 2.483656, -1.821594, 3.972262, 1.828213, -0.461630, -2.807651, 1.166925, -1.519157, -0.093785, -1.908515, -0.512475, -4.476390, 1.529089, 0.964360, 1.900128, -0.888700, -0.844534, 1.599827, -2.783862, 2.001250, 1.975040, -1.352037, 2.137866, -1.235946, -1.438479, 0.228712, -2.683421, 0.129679, -0.052211, -0.810313, 0.342873, 0.344503, 2.244840, -0.176502, 1.619955, 0.674555, 0.851681, 3.089395, -2.131029, 1.433718, -0.476998, -0.611014, -2.152775, 0.165411, -0.787076, 2.570150, -0.973344, -1.597286, -0.388607, 2.141652, 0.727819, -0.370297, 0.971056, 0.011458, -1.178635, 1.190090, -0.302382, -0.550987, -0.956748, -0.070032, -0.107558, -0.421611, -0.990505, 0.154211, 0.441118, -3.552241, -0.022393, -0.294533, -0.846129, -3.164885, 1.241660, -0.058364, -3.566252, 0.163943, 0.339022, 0.236127, 1.147684, 0.347062, 1.353426, 2.250771, -0.174392, 0.446126, 1.937500, -0.634967, -0.131388, -0.448045, -1.244684, -0.725132, -1.435691, -1.738168, 0.088492, -1.221726, 0.028294, -1.696285, 0.995742, 0.585122, -0.436960, 0.107112, 0.414259, 1.082209, 0.181578, -0.226709, -4.358131, -1.418336, -0.512266, -1.336164, -0.296457, 7.184535, -0.553087, -2.145152, 2.550748, -0.760159, 1.821675, -2.182503, 3.951545, -0.577346, -2.530480, -3.358564, -1.959923, -1.442705, 0.530144, -0.060973, -0.016459, -0.443560, -0.521755, -2.103884, -1.286840, -2.623459, -0.692700, -0.918089, -0.803318, -0.991122, 1.028888, -1.471276, 1.424925, -1.107191, -0.965495, 0.154568, -1.529995, -1.823717, 1.225945, 0.602767, 0.400017, 0.410254, 1.832579, 2.212959, -0.747431, -2.006568, -0.498442, -4.118858, -0.301151, -0.305645, 0.194513, 2.383834, 0.408205, 3.341026, -0.185864, -1.192237, 0.795731, 2.458055, 1.343174, 1.944866, -1.206866, 2.884596, 0.652055, 3.231265, -1.614543, 0.183459, 0.086889, 0.430526, -4.576360, 0.630569, 0.491099, -0.847796, 0.321519, -2.994968, 3.804292, -1.455904, -0.918893, 2.082967, -0.069049, -1.643836, 3.750937, -0.650173, -0.148329, -0.172040, -0.918323, -0.732911, 1.701877, 3.259797, -0.230898, 0.808398, 1.285467, 0.337568, 1.223102, -1.505829, -0.719409, -0.377685, -1.978758, -0.771919, -4.004796, -2.054174, 0.746841, 0.145322, 1.321690, -2.126296, -2.036186, 2.277106, 0.016663, -0.767762, 0.322893, 1.736591, -1.951746, -0.033855, 0.729972, 0.788443, 0.183445, 0.319821, -6.107355, 0.886826, 1.263803, 0.366631, 0.521532, -1.304369, -2.765462, 0.060144, 0.554792, -1.775460, -0.411091, -0.304042, 0.928038, -0.849652, -4.527589, 1.217022, -3.725563, -5.323983, -0.014202, -1.130519, 2.211132, -0.134026, 0.030539, -0.981018, 0.206317, -0.317929, 3.885501, 0.742766, -0.977644, 1.758858, 2.927278, -1.278957, 0.873640, -1.027960, 1.095670, 0.117764, -2.288657, -3.509668, -0.047583, 1.867454, 1.197928, -2.911638, -2.364637, -1.122951, -4.034993, -0.559865, 0.525332, -1.691924, -0.941415, -0.525242, -0.296868, 1.159536, 1.086406, -1.000133, -2.499835, 1.402897, 4.952349, -1.310867, 3.258150, -0.778347, -0.268580, -0.196423, -0.523824, -1.064013, 0.792269, -6.334123, -0.162495, 0.073113, -1.992890, -1.624917, -0.309036, 1.016871, -3.047204, -0.509374, -2.491468, 1.003789, -1.797539, 0.166563, -0.822455, -1.270393, 0.776553, 0.539906, -0.197045, -0.080154, -1.026400, -1.774185, 0.403188, 1.057254, 0.945254, 2.849382, -1.693514, 0.057934, -0.857088, 0.381920, -3.586883, -0.906704, 0.664517, -1.584051, 0.985732, -1.237406, -1.308203, -0.818927, -0.246370, -2.968447, -0.234438, -1.024057, -0.420833, -1.944758, 0.788728, 0.493811, -0.091664, 2.025641, 2.102431, 0.683854, -0.197049, 
0.092760, -3.970118, -0.933897, -0.184341, 0.504163, 3.078834, 2.768745, 0.518151, 3.413862, 0.188685, -2.417682, 0.290612, -2.591668, -0.998794, -1.452080, 8.389590, 0.578668, 1.229078, 1.017800, -1.350522, -2.397149, -2.219742, -0.049068, 1.150641, -0.206271, 0.633414, -0.467812, 0.620190, 4.507153, 2.818350, 0.571067, 0.664137, -0.791071, 0.220109, 1.523981, -1.379269, -0.826409, 0.587569, -2.330011, -0.486263, -1.444806, 0.011725, 6.686158, 0.388426, 0.310425, -2.178561, 0.547642, 1.225539, 1.200455, -1.100693, 3.323766, -1.788453, -1.965163, 0.214364, -0.182455, -0.653088, 1.040133, 0.703285, -2.207464, -2.887304, -0.010447, 1.970276, 1.418307, 0.728565, -0.587430, 0.403100, -1.716673, 0.864310, 0.129826, -1.240194, 0.496615, -1.114163, 0.875957, 0.152658, -0.685775, 0.166833, -1.099020, -0.464600, 0.342878, -1.487813, -0.974075, 1.786528, -0.618787, -0.053368, 0.876156, -1.104406, 1.321758, -1.502696, 0.758169, -0.540708, -0.528666, 3.349993, 0.070434, 1.461162, -3.544199, 1.554642, 0.691654, 0.058363, 1.318910, 0.381145, 0.727529, -1.435946, 0.592560, -3.905318, -0.094539, -1.126841, 3.018564, 2.541469, -1.618298, 0.514458, 1.918137, -1.856116, -1.378256, -3.185948, 0.927860, -0.774732, -3.772287, 1.468701, 4.235704, 0.730582, -0.833708, 0.093555, -0.737915, -2.385541, -1.650165, 0.755775, -3.857619, -1.170876, 0.125429, 0.773791, -3.103241, -0.261559, -0.063795, 0.758651, -0.437210, -2.787760, -0.314942, -4.298765, -1.067320, 0.831327, 0.587873, -2.702937, 0.255556, 1.505676, 0.352411, -0.318188, -1.064380, 1.161265, -0.142489, -2.844601, 4.133595, -1.232116, 1.008827, 0.770166, -1.447083, -2.225446, 0.673317, 1.196312, -0.162135, -0.137597, -0.413933, -0.944130, 0.886967, 1.532139, -3.375753, -0.435553, 0.373217, -0.233012, -1.690537, -1.326474, 1.353997, 5.176952, -2.505056, -1.193604, -0.188181, 1.137731, -0.528390, -0.500226, 0.804525, -1.472408, 2.258844, -1.035661, -2.530096, 1.115152, -0.343501, -0.181792, 1.424267, -2.451889, -0.571956, 0.250034, -0.295899, -0.593026, -0.236472, 2.189043, 0.837979, -2.021635, 2.651701, 0.020600, -1.790586, 0.960398, -0.583401, -0.522204, 0.014939, -0.018401, 0.078440, -2.408650, 0.871342, 0.842621, 2.651843, 0.245014, -1.991489, 2.936932, 1.172129, -0.853839, -0.460163, 0.976885, 1.831560, -0.102884, -2.159085, -0.011857, 0.583826, -0.467147, 0.816048, -3.844757, 0.744443, 0.281508, 0.574364, -0.689570, -0.900283, -0.919239, 1.463283, -3.583446, 1.677760, 0.858401, 6.208580, -1.461956, -2.028792, 0.203862, -0.514053, -0.036900, 0.457810, -0.124636, -0.904888, -1.134708, -0.610438, -0.674280, -1.453439, -0.535142, 0.174291, 1.091521, -1.598876, 0.076207, -2.438708, -0.105628, -0.070608, 0.490392, 0.442303, 0.768931, -0.549218, 0.144212, 0.531282, 0.833482, 0.777007, -2.863263, 2.854618, -0.030283, 0.811216, -0.701562, 3.931106, 0.069982, 0.111765, -0.542222, 1.866537, -0.489160, -1.649108, -3.219932, -1.514001, -0.444119, -1.672627, -1.727185, 0.841360, 3.777926, -0.197962, 1.959742, -0.053520, -1.890777, 2.528770, -0.051881, -0.091327, -0.606603, 0.673440, -0.173713, 1.656440, 0.047424, 0.867838, 0.694412, -0.639834, 0.332601, 0.525874, -2.030658, 1.371668, -1.011419, -0.018221, -2.336400, 0.926431, -1.965634, -3.233287, -1.050931, 0.020313, 0.008429, 0.406452, -0.077658, 0.093131, 1.333634, 1.364920, 1.044719, -0.657374, 0.653661, -2.102872, -0.773771, 0.393569, -0.642374, -1.901093, -1.800561, 0.925856, -0.965688, 2.690480, 1.692870, 0.814953, -1.262914, -0.360530, -1.020819, 0.430978, -1.104572, -0.436584, -0.575685, -1.950012, 
-0.692778, -0.950671, 0.879754, 0.776726, 0.135039, 1.217412, 4.627985, -1.104025, 2.732612, 0.579952, -0.949408, -0.098364, 0.667268, 3.058710, 0.748927, -2.103567, -1.527468, 0.532417, 0.633727, 1.486574, -0.474240, -1.531879, -0.905295, -0.155898, -0.229789, -0.312329, 0.305685, -1.641188, 0.384322, 2.096009, -3.121164, 2.874967, 1.132273, -1.160576, 2.566724, -0.788500, -0.538607, 1.426128, 0.688084, -0.236464, 0.769345, -2.276692, -0.164642, -2.173046, -0.862540, -1.791600, -0.426579, 1.741279, 1.448222, 0.855803, 0.832409, -2.346241, 0.785191, -1.443790, 0.545450, -1.251285, 0.306100, 0.233932, -0.287912, -0.829601, -2.039009, 0.678542, 0.569348, -0.107695, 1.893350, 2.344699, 0.807760, -0.155852, 2.536183, -1.016996, -0.745473, 4.697989, 0.305604, 1.576742, -1.233571, -1.505556, -1.064806, 1.361340, 0.216110, -0.778844, -1.157238, 0.643939, 2.376352, -0.359937, 0.892480, 0.836386, 2.405310, 0.481724, -1.754836, 1.640052, 2.082973, 2.177998, -0.686278, -0.453808, 1.284923, -1.304886, 1.752683, -1.217982, 1.272659, -0.699114, -0.321490, 0.017435, 0.711109, -0.842600, 2.172017, -0.303333, -0.884819, 0.151581, 0.861716, 5.031510, -0.759949, -1.122212, -1.030814, -0.552033, -2.022228, -0.549100, -1.977696, 0.787984, 0.463481, -0.930972, -1.452297, 0.806012, 2.472124, 0.300999, -0.180325, -0.442161, 0.126769, -1.101915, 1.253808, -1.065416, -0.382719, -1.115863, 1.114735, 0.748863, -0.731119, 1.530700, -1.609571, -1.570549, -2.601246, -0.055399, -0.262242, 2.139844, 1.068339, 1.498265, 4.601118, 0.459227, -0.467124, -0.016818, -0.846170, 0.407918, 0.827337, 2.431346, 0.191171, 0.020118, -1.176208, -0.809230, -0.936010, -0.093285, -2.142360, -0.891113, -1.322299, -0.472136, 0.629155, -0.123252, 0.006292, 1.606415, 0.339211, -2.167264, 1.260660, -1.366756, 0.913975, -1.689967, -0.105102, 1.371262, 1.391783, -0.422733, -0.324468, -0.728885, -0.917710, -0.869212, -0.890510, 1.436343, -1.214656, -0.277204, 2.095023, 2.337512, 2.522190, 0.876392, -0.650990, -1.509156, -0.395380, 1.694393, -1.352576, 1.307880, -0.577378, 2.064705, 1.132324, 1.085479, 1.678848, 1.010567, -0.089989, 1.959001, 0.291288, 0.122632, -0.264179, -1.135114, 1.749955, -1.650667, -0.567050, 0.794765, 2.344459, 1.499657, 0.745928, -0.227571, -0.892684, -0.514688, -2.066394, -1.552437, 1.936354, 0.877455, -0.999577, 0.823305, -0.737004, 0.008926, -2.502623, -0.042059, 1.381846, 0.076001, -3.317323, 0.790721, 2.069414, -1.016928, -0.148098, -1.902225, -1.826419, 3.652578, 1.043417, -0.697644, -1.888567, -2.321141, -0.806015, 1.295105, -1.047930, 0.255314, 0.082459, 0.908516, -0.274531, -0.163356, -0.855869, 2.342495, 0.567557, -1.624329, 0.244196, 0.674070, 0.095953, -0.245474, -1.620124, 0.486911, -0.740139, -0.153294, 2.433330, -1.233023, -2.823224, -1.457184, 2.209202, -2.723373, 1.811669, 0.433449, 2.358466, -0.902272, 2.764589, 2.574164, -1.916335, -0.295650, -0.615423, -1.146909, -0.697601, -1.787206, -1.703290, -0.705669, 0.133116, -0.541842, 1.953445, -0.915688, -0.823846, -1.078612, -0.096120, 0.279419, 1.148154, 2.104976, -1.941778, 0.351885, -0.280529, -1.039893, -1.402304, -1.918783, 0.825614, 0.218580, 1.394795, 1.857791, -0.999485, -1.786477, 2.308074, -1.231458, 0.063259, -0.929566, -7.953049, 0.452391, -1.125412, -1.838698, -2.640692, -2.091324, -2.451443, 0.060629, -2.279750, -1.798112, -2.460948, 1.952752, 0.226525, 1.108863, -0.068355, -0.457852, 1.527080, 0.799076, -2.571167, 0.086885, 1.405293, -0.188894, 1.219698, 11.678942, -2.414214, -0.580795, 1.424143, 0.400336, -2.023376, 3.766325, 1.026467, 
-0.156651, 3.054648, 0.202205, 2.488985, -3.661493, 2.467989, 0.915066, 1.179819, -1.966798, 1.699531, 3.319919, 0.503657, 0.504187, -0.256452, 0.396068, 1.055521, 0.426303, 0.222733, 1.933537, -1.636191, -1.083127, 0.100778, 1.859645, -0.968564, 0.496013, -0.655377, 1.019886, 1.404765, -1.302617, -0.004665, 2.965729, 0.632558, -6.853092, 0.353593, -1.923296, -4.299502, 0.527606, 1.343225, 2.386575, 0.797445, -2.060218, 2.438961, -1.105527, 1.774719, 1.735536, 4.056285, 0.492659, 0.936320, -2.876715, -0.140729, 0.430298, 1.294615, 0.669918, 0.271047, 1.878645, -1.320951, 1.322578, -1.561556, 0.161816, -1.362854, -1.194663, -0.599474, 0.059861, -1.434035, 2.977043, -1.529187, -0.306176, -0.541282, -0.544553, -1.638931, 0.736715, -2.018234, -0.031919, -1.189114, -1.678918, 0.788182, 0.901396, 1.406277, -0.204452, -1.693584, -0.018040, 0.619784, -1.475589, 0.098325, 4.508126, 3.182445, -1.048236, -1.320299, 0.142103, 0.362558, -2.220502, -1.717681, 1.899212, -1.945802, -2.116177, -0.829608, -0.963384, 0.386516, 0.527406, 0.933299, -0.109855, -2.206789, 2.137621, -0.946538, -0.533125, -0.488820, 1.345459, -1.519374, -2.040171, -0.775507, 0.615902, 0.328110, 0.500276, -3.713193, -2.401886, -1.090950, -0.127374, 1.539707, -0.215972, -0.845000, -1.578743, 1.218012, 1.011701, -0.450705, -1.659180, -2.671433, -2.506796, 1.433076, 1.732815, -0.262248, -3.809357, 0.727647, -1.585780, -1.086602, 0.636478, 0.153659, 1.757239, 1.673101, 0.352491, -0.896577, 0.833177, -2.428023, 1.154135, 3.452332, -0.576276, 0.817413, 1.121931, 4.688684, -1.169102, -3.617183, -1.053644, -1.593388, 0.492168, 1.271367, 1.025314, -0.485588, 1.791973, -0.043998, 0.432523, 0.531555, -1.054497, 0.402924, -0.389349, -1.051243, -0.381169, 1.685210, -0.516398, 1.140060, -0.123690, 0.133988, -0.047918, 0.570673, -1.044458, 0.682566, 0.077535, -0.927769, 1.549031, 0.452144, -0.676709, -1.461138, -1.265611, -0.923475, 0.860736, -0.772783, -0.892272, 0.330651, 0.926755, -0.193207, 0.524169, 1.462198, -0.634567, -0.690321, -1.088943, -0.060451, -1.477703, -3.903250, 0.028010, -1.657811, 0.404509, 1.504079, 1.153226, -1.717037, -0.034980, 0.874089, 0.968220, 4.233302, -0.454326, -0.449342, 1.123919, -0.151024, -0.416263, -0.599757, 1.992396, -0.102499, -1.357982, 1.592250, -3.915248, 1.187836, 0.403215, 1.441373, 0.043264, -0.860627, -0.162307, -0.307341, -1.098346, 2.642102, -4.422021, -3.829863, -1.993164, 2.481008, -1.335412, 2.149526, -0.471009, -0.383002, -1.827906, 1.157187, 1.688772, -0.809894, -0.727387, -1.251942, -2.517845, -0.584093, 0.556983, -0.285559, 0.030616, -0.312395, -0.154149, 0.374468, -2.963465, 2.172886, 0.443644, -2.247712, 0.602167, -0.537698, -0.041397, 1.293626, 0.887306, -1.679885, -2.640060, 1.232915, 1.914094, -1.436891, 3.276104, -0.639831, 1.459533, -0.183050, -0.179623, 1.537835, -0.441862, -1.221415, -1.202643, -0.339080, -1.128653, 0.589814, 1.817411, -0.027398, 1.776857, -0.079182, -0.033620, 0.072972, 0.200031, -1.994103, 0.453007, -0.047347, -2.639814, 0.289200, -0.800354, -2.199389, 0.763129, 1.161637, -3.279893, 3.045088, -1.438141, -0.066738, -0.170937, 3.228382, -0.718058, 0.008865, -0.641102, 1.587295, 1.319516, -1.186299, 0.203019, -1.307042, -0.653389, 0.871392, 1.510107, -0.143629, 0.001508, 0.005160, -2.665793, -1.079569, 0.245808, 0.979850, -2.519487, 0.615138, 0.549478, -0.067410, -0.463877, -1.049949, -1.745986, -0.173153, -0.659071, -2.577020, 0.128683, 0.040614, 0.039138, -1.107141, -0.172750, 0.951488, 2.417813, -3.509801, 1.588482, -0.745076, -0.670997, -2.944751, -0.577594, 
1.000117, -0.304519, 1.145466, 0.118416, -0.380714, -1.163751, 2.171146, -1.137656, -1.767855, 1.174660, 2.530463, -0.933513, 2.094655, -1.084815, 1.757184, 2.396101, 0.227469, 1.959815, 0.372973, 0.783120, 0.749221, 0.703332, -0.022950, -1.255128, 0.834598, 0.898361, -0.615497, 1.131268, 1.129401, -0.796847, 0.277227, 0.022084, -0.859409, -1.869596, 2.134916, -0.555658, 2.232072, 0.210954, 0.794191, 1.970781, 1.929192, 1.443892, -0.025377, -1.400775, 1.075047, 0.264515, -0.723359, 0.015378, 2.079454, -0.796261, 0.315285, -1.405742, 2.362265, 3.469258, 2.460742, -0.789086, 0.300839, 0.243847, 1.060216, -1.296327, -0.621006, -1.348864, 1.451716, -0.342701, -1.694321, 0.777496, 2.813106, -1.899789, -0.257446, 1.398711, -2.191065, 2.490599, 0.958931, -0.474872, -0.624368, 1.271940, 0.756449, -0.130391, -0.084979, 1.362932, -1.252360, 0.230489, -3.245268, 1.056214, 3.134413, -1.926303, 0.858515, -0.865250, -0.055744, -2.101393, 0.897214, -0.033249, 1.185271, -0.305044, 0.160016, 0.483297, -1.491721, -0.529542, 2.200641, 0.536981, 0.076141, 2.599686, -0.079704, 0.419227, 3.077253, -0.698236, 0.199733, 1.411079, -2.497913, -0.702270, 0.457026, -0.859669, 3.009468, -0.588098, 1.619484, -2.591563, -0.051351, 1.622896, -1.461597, -1.373804, -0.387197, -2.398344, 0.874232, 1.952216, -0.762721, 0.732759, -0.261160, -0.746371, -1.427404, -1.373406, -0.998958, 0.011014, 0.351923, -3.094855, -2.828198, -0.284748, 0.727657, 0.347360, 2.004357, 0.140569, -3.039148, 0.017318, -0.890693, 0.428602, -0.558460, -0.040785, -0.309201, -2.868190, -1.748198, 6.530122, -1.051790, -0.567761, 0.482915, -3.028437, 1.840485, -1.006252, -2.233927, 0.973578, 2.219829, -1.072354, -0.106849, 0.480852, 0.973742, 0.418984, 2.441674, 0.846208, 0.635905, 0.709736, -0.883262, -0.123730, 2.599570, -0.634373, -1.205588, 2.468590, 1.842569, 1.734810, -0.065832, 2.332975, 0.404545, -0.149880, -0.875170, -0.887063, 0.313458, 1.107406, 2.219567, -0.528014, 0.752688, 0.121217, -0.403403, -2.761505, 1.490429, 1.366968, 2.998828, 1.763393, -0.154834, 2.722400, 2.853975, 2.477918, -0.687242, -1.790297, 0.232602, -1.469884, 6.514165, 0.614459, 1.232791, -0.346462, 1.223361, -1.098690, 1.305230, -0.327180, -0.538847, 1.254126, 2.756663, 2.988449, -2.864126, -4.365308, 0.754789, -1.980978, 3.187338, -0.380813, 1.257949, 0.438836, -0.111813, -0.005574, 2.469913, -0.613243, -0.470447, 0.583182, -0.366051, 0.846679, 2.334949, -0.491402, -1.471897, -0.012673, 0.177996, -1.107022, 0.279164, 0.566607, 0.782696, -1.192201, 2.145116, 1.820361, -1.854302, -0.887881, -1.209134, 0.640661, -1.069622, 1.014870, 0.135546, -1.669796, 0.722794, -0.445563, -2.883994, -0.901784, 1.785513, 0.883540, -1.163346, -0.831124, 0.196551, -1.088842, 0.456436, 1.569065, 1.717653, 0.126327, -0.244374, -0.397899, -0.611888, -0.828950, -0.089661, -2.204021, 0.840066, 0.139645, -2.976500, -0.225026, -0.374762, 0.011366, 1.400090, -0.831109, -0.092224, -0.980802, -2.395170, 0.346915, -0.782422, 1.594723, -5.437796, 1.402780, -2.173856, 0.157068, 1.501613, 1.148977, -0.646014, 0.115535, 3.879438, -0.986376, 0.818065, -0.380006, -0.530464, -3.900960, 0.689789, 0.226382, 1.421528, 0.382571, 0.549739, -0.519871, 1.678414, 0.563494, 0.746316, 1.455697, 0.689835, -0.261754, -1.269034, 0.890723, 1.555183, -0.273945, -0.003310, 0.378518, -0.759207, 0.570393, 2.029422, -0.368552, 1.051289, -2.139588, 0.782326, 0.452396, -0.873124, 1.439425, -0.879747, 0.093171, 1.661324, -0.999725, -0.546240, 0.676964, -3.085967, 3.901103, -1.001095, -0.168283, 1.444647, 1.423535, 3.737822, 
-0.319527, 0.101771, -1.368534, -1.261986, -0.090278, -1.431562, -2.672978, -0.558734, -0.422858, 0.140180, 0.728199, -0.897009, -0.132918, -1.936385, -0.023013, 0.306948, 2.999689, -0.208378, 0.449371, 2.819936, -1.010662, 0.136802, 1.934311, -0.134759, -0.065738, -0.248452, -0.308273, 2.383839, 3.907777, -0.015681, -3.471670, -0.477245, -1.599790, 0.000448, -1.808520],
+	"phi3:latest":          [-0.331782, 1.888663, 1.200525, 1.289363, 0.998380, -0.655262, -1.793847, 0.665316, 0.505828, 0.716095, -1.893194, 1.218456, -1.309054, -0.447788, 2.593305, -0.537991, 0.450933, 1.653979, -0.929486, -0.024294, -1.257964, 0.415388, 0.712649, 3.635659, 1.193032, 0.633469, -0.152762, 0.126132, -1.080760, -0.906523, 0.261134, -1.082573, 0.121791, 0.041670, 1.182726, 0.020926, 0.890734, -1.112368, 0.272508, -3.172650, 1.007221, -2.737286, -0.752792, 0.806690, -2.347712, -1.033880, 0.257948, -0.655215, 1.230030, 2.431018, -0.001280, -1.203449, -0.937772, -0.775139, 1.965822, 0.182722, 0.805110, -0.064984, -0.172154, 0.108768, 1.113860, -0.687376, -0.136099, -4.118443, 1.489239, -1.689033, 0.121872, 0.570745, -0.513955, 0.448374, 0.134970, 1.531063, 2.303051, -1.522820, 0.932279, -0.187607, 2.760937, 2.080870, -0.708050, -3.668794, -1.114294, 0.577248, -0.542914, -0.491282, 1.031656, -0.686680, -1.205845, 0.370542, -1.197135, 0.493187, -2.067161, 0.590035, -0.312661, 0.010634, -1.059682, -1.019925, -1.079581, -0.784771, -1.534287, -0.745539, -1.438104, 0.408945, 3.307072, 2.807780, 0.836534, -0.323777, 0.078335, -2.040690, 0.593655, 0.305799, -0.598534, -4.537859, 1.865855, -5.356523, 2.168699, 0.199434, 0.458956, -0.146927, -0.180848, -0.307271, -4.499695, 1.250432, -0.918718, -0.555892, 1.550857, 0.533830, 2.284615, 3.066180, -0.809521, 0.429381, 0.940451, -0.420210, -0.439490, -2.831492, -0.230623, 0.812441, -0.747944, -0.177522, -0.476225, 1.488531, 2.712494, 0.107758, 1.136478, -2.371375, 0.506508, 0.539166, -0.304731, 2.540707, 3.669775, 1.193841, 0.099604, 0.755806, 1.671601, -0.837445, 1.567671, 1.423394, 6.966916, 1.032748, -0.020530, -1.179447, -0.181892, -0.358674, -1.033161, -0.457489, 1.743537, 0.312635, 0.463954, 0.293266, -1.393930, 1.422883, 2.216323, 0.043793, 1.275150, 0.627181, 0.413553, -0.359636, -1.511397, -1.454917, -1.244547, 1.734582, -0.206054, 0.634956, -0.609846, 1.435774, -2.048944, -1.875099, 1.053775, -0.301367, 1.839846, -0.719768, -0.903455, 0.973591, 1.267984, 0.139152, -0.465354, 1.631779, 1.706993, -0.568199, 2.394835, 0.359601, 1.617715, -0.432758, -1.461887, -0.135692, -2.426039, -0.496890, -0.319927, -0.110555, 1.209607, 0.801021, 1.248233, 0.551905, 0.589787, -0.270500, -1.057468, 1.335677, -0.727429, -2.010354, -1.193273, 1.876885, 0.199732, 0.952656, -2.354916, -2.555338, -0.792350, -6.405796, -0.973989, -1.177463, -3.435070, 1.956603, 0.029680, 0.261674, -0.993050, -1.284503, 1.697352, -0.895739, 1.074561, 0.786929, -2.653123, 0.719731, -1.782677, 0.104583, -0.070126, 0.849115, 0.879853, -0.959925, 0.023526, -0.805103, 1.399971, 0.228651, 2.112906, 0.769736, 2.598775, -1.616839, -2.866297, 0.688289, 0.967245, -0.528837, 1.043855, -0.813634, 0.620666, -0.286699, -1.673048, -0.169107, -1.623424, -1.321982, -3.332716, -0.411326, -1.684741, 2.134276, -1.740958, 0.598584, -0.992499, 5.294982, -0.481816, -0.680845, 0.113114, -1.231122, 1.018151, -0.652560, -0.647686, -1.782176, 0.413933, -2.831025, -2.853311, 1.074271, 5.026616, 0.050189, 1.695521, -1.131159, -1.007593, -1.860660, -0.210799, 1.702898, 0.002810, -0.689529, 0.576500, 0.157276, -0.664268, -1.006720, -0.562447, -0.269952, 2.138022, -1.618097, -2.395590, 1.264884, 0.771609, 2.024291, -4.463664, 0.976682, 0.611146, 0.557015, 0.406314, -1.554351, -0.683566, -0.509024, 1.003536, -1.233879, 0.620211, -1.912232, -0.843399, 0.396227, 0.217314, -2.936852, -1.082635, -2.436602, -2.979013, 1.416017, -1.253309, 0.128324, -1.531144, -0.823306, -1.627728, 1.464539, 0.598376, 
-0.625224, 1.379323, -1.296928, 3.733621, -0.402005, -0.502481, 2.202061, 0.491992, -0.293026, -0.224052, -1.743726, -0.025504, -2.823492, -0.801840, 1.461100, -0.946508, 1.663853, 0.516056, 1.118193, 0.872803, 2.391853, 0.363638, 0.386560, 0.916678, -0.255919, -1.655668, -1.329800, 1.121830, -0.179955, 0.905750, 0.410062, -0.460930, 1.100121, -2.608666, -0.037558, 1.052379, 1.468595, -1.940455, 0.987077, -1.020959, -0.850926, 0.665312, -0.616026, 0.937322, 0.222524, -1.306938, -0.538988, 0.753711, 1.228283, 2.270849, -0.202739, -0.543886, 1.655816, -1.952059, 1.137215, 0.786179, 1.228126, -1.704684, -1.457620, 0.465429, 0.629195, 1.137187, 1.215734, 1.220341, -0.233745, 0.021491, 0.501275, -0.742705, 0.215831, 0.588493, -1.650967, -0.646095, 1.238620, 0.216092, -0.247341, -1.943288, -1.044502, 0.166651, 1.419106, 1.097436, -0.842580, 0.843983, -1.672148, 1.034715, 1.160797, -0.133636, 0.251693, 0.256113, -3.728125, 0.572900, 0.126097, -0.321870, -0.849928, 0.025637, 0.330923, -0.445982, -1.722699, 0.861761, 0.936651, -1.674895, -0.613186, 0.897030, 0.337920, 1.018083, 0.290069, 0.253213, -0.409918, -1.146255, 1.666793, 2.334866, 0.914748, 1.321567, 1.553441, -2.307631, 0.384901, 0.905515, -0.835983, 0.719357, -1.188387, -0.899210, -1.526436, -0.771791, -0.037529, -0.709167, -0.076109, -0.429650, 0.227836, 0.862626, -0.529444, -0.304954, 1.671549, 2.157148, -0.435607, 1.332639, -0.833320, -0.782735, 1.286584, 0.476222, 0.580646, 0.349235, -1.786693, 1.406183, 0.420583, -0.624807, 0.697277, 1.321689, -3.137897, -1.231940, 0.382046, 1.303738, -2.219354, -1.742369, 1.506663, -1.336856, -1.776091, 0.907745, -1.661343, -0.198556, 1.129866, 0.409350, -1.672331, 0.343483, 1.037570, 0.916265, -0.317749, -4.326462, 2.636568, -0.474869, -1.698115, 1.248424, 1.024646, -0.269191, 1.498065, 1.702345, 2.846062, 0.303959, -1.147084, -0.372634, -0.913464, 1.784185, -1.335213, -1.163813, 4.534195, -1.033662, -0.350868, 0.177666, -0.375238, -0.047751, -1.759573, -1.817324, 2.531796, 0.092635, 1.402217, -0.186967, 2.302000, 1.183251, -0.981169, -0.027306, -0.962279, -2.556610, 3.168603, 0.538228, -1.538681, 0.256375, -0.111212, 0.701298, 0.082229, -0.830514, 1.423748, -3.368465, -0.577068, 1.678749, -0.018569, 0.380880, -0.929372, 0.169438, 1.732153, 0.219515, -2.110588, 1.828675, 2.531060, 0.813516, -2.485929, -0.849423, -0.583957, -2.631165, 0.372207, -1.723973, 0.548265, -0.508192, -0.842904, -1.948049, 2.395320, -1.950990, -2.131252, -0.735650, -0.807738, 1.517375, 0.411855, -0.417785, 0.695481, 3.132597, -0.309361, -0.962455, -0.060781, 2.010265, -1.197449, 1.309034, 1.667662, -0.401816, -0.328225, -0.066705, 0.457056, 0.990444, -0.956658, -0.681285, -0.944606, 0.523004, 1.199034, 0.087256, 1.541614, -0.848096, 0.792162, -0.501671, 4.378613, 0.894378, -0.441204, 0.082530, 2.260096, -2.228149, 4.717083, -1.186788, -0.597485, -0.368734, -0.063365, 0.530020, -1.802985, 0.295136, 1.495470, -1.487975, 0.038948, 0.951755, -1.020662, -0.375789, -2.556568, -0.653668, 3.495116, 0.442141, -0.633872, -1.655333, -1.468453, 0.401802, 5.745409, -0.341410, -0.855036, 0.268208, -1.166003, -0.433159, -0.545326, 2.481983, 0.158591, -1.682132, 1.009237, -0.638927, -1.334942, -0.322424, 1.449141, 1.163893, -0.386967, -0.553634, 2.797484, -0.416864, -1.247119, 1.006061, 2.358471, -0.628549, -0.750396, -2.033410, 2.005108, -0.166425, 1.544945, 0.330924, -2.188920, -1.281452, 0.556581, -0.307962, 1.716098, 2.836176, -1.910627, -0.076177, -1.542220, 0.026247, 0.593243, 0.279117, 0.156666, -4.955784, -0.407404, 1.488657, 
0.278332, -0.734341, -0.596073, -0.102984, -0.959433, 0.578759, -0.861591, 0.053966, -0.694741, 0.218171, -2.926390, 0.739668, -1.028017, -1.277984, -1.073602, 1.215993, 0.658857, -0.567500, 1.734804, -2.023831, 1.212106, -2.246280, -0.623249, -2.868179, -2.218319, 0.457068, 2.811556, 1.820529, -2.150381, 0.600829, 0.195249, 1.761236, -0.900438, 1.695990, 2.442457, 0.005890, 4.132634, -0.806554, 0.317273, 2.583012, 0.318541, -1.193946, 3.290548, 0.562016, 0.586043, 1.734598, -0.688514, 1.017300, 0.592274, 0.646282, -0.230299, -0.987452, 2.416633, 0.276586, -1.211152, 0.489671, -1.519331, 0.649645, 1.909639, -0.362585, 0.118936, 0.645842, 0.215484, 0.286462, 1.842013, 0.883945, 2.218506, -0.037992, -0.904247, -0.967820, 0.039781, -0.308431, 0.977812, 1.038229, 0.272080, -1.008247, -0.643514, 1.502011, -0.841505, -1.984012, 0.658119, -0.705461, -0.114504, -0.567679, -0.782671, 1.356886, -2.388955, -1.850967, 2.288174, -0.108086, -0.687346, 0.663177, 0.029310, 0.631634, 0.812906, 0.721973, 0.997441, -0.352522, 1.084785, 0.760377, 1.334125, 0.123672, -0.779595, 2.224239, 0.468647, 0.928250, 2.011061, 2.181119, -1.410953, 2.479482, -1.403790, 1.424959, 0.406790, 1.980434, 0.134588, 0.862166, -0.949528, -0.337341, -1.506291, 1.816352, -1.102223, -1.759313, 0.545314, -0.063965, -0.738658, -1.891264, 0.946221, -0.055098, 3.403584, 0.949688, -1.543008, 1.460190, -1.655720, 1.365995, 0.707883, 2.725739, 1.254143, 1.414804, -0.568412, -1.905285, 0.287855, -2.530554, 1.387836, 1.471262, -2.719731, 1.009877, 0.172707, 0.678457, 0.125848, -0.191226, 1.651061, -1.352307, -2.129730, -1.374591, -0.710863, 1.778056, -0.322481, 2.552818, -0.223694, 2.006301, -0.296190, 1.284814, -0.344928, -1.339411, -2.225923, 2.261780, 0.551574, -0.428283, 0.623739, -0.364829, 0.695056, -2.190797, -1.229941, -0.197826, 0.286019, 0.191163, 0.437174, 0.147713, 1.924075, 0.221861, -1.403060, -0.173633, -0.037499, -0.786128, -1.993309, 1.023275, 1.362555, 0.158199, 0.860005, -1.885816, 1.648090, 1.289884, 0.680118, -1.098040, 0.564846, -1.239455, 1.307932, 0.748337, 1.255219, -0.376539, -2.487237, 0.452882, -1.010058, 0.268328, 1.320020, 0.866643, -0.074108, 0.488640, -2.088172, 1.387924, -0.639570, -0.641209, -0.218602, -1.815887, 0.825953, -0.198470, -1.210092, -0.497010, -0.210681, -2.419325, 0.553231, -1.253540, -0.517147, 4.352762, -0.799085, 0.504252, 0.464497, 0.093610, -0.448315, -0.296233, 0.199169, 1.354983, 0.231581, 0.976367, 0.393705, -0.523379, 1.192837, 0.335261, 0.945564, 3.786133, 0.382227, 1.075333, -0.256976, 1.035057, -0.135201, 1.343422, 3.342458, -2.192386, 0.500333, 0.435597, 1.305236, -0.226735, -0.834256, 0.098611, -1.596345, -2.535961, 1.523905, 0.706213, 1.110188, 0.023420, 1.759786, 0.474664, -1.082119, 1.594128, -0.718762, -0.903734, -1.400833, 0.956944, -0.433848, 1.021788, -0.578503, -1.086085, -2.859450, 0.879658, 0.919896, 0.171229, 0.558165, 2.145216, -0.021277, 1.009513, 0.582972, 2.934348, 1.970412, -0.673170, 0.964699, -0.573964, -0.626150, -0.726634, -2.370512, 1.713810, -0.796989, -0.907288, -0.532314, 3.342188, -5.447992, 0.883931, -0.699223, 1.340747, -0.359668, 2.887066, 0.468472, -0.982526, 0.212517, -2.172879, -1.553142, 0.135613, 1.943457, 1.718528, -0.249226, -2.330694, -2.559153, 1.097077, 0.748280, 1.828044, -0.524656, 2.009469, -0.519914, -1.051009, 1.699620, 1.160786, -0.875061, 0.185425, 0.500408, -0.795241, 0.310667, 0.311145, 2.406953, 1.839838, -1.669149, 0.615994, 0.463547, -2.903665, -0.764632, -1.713024, 1.773419, 1.473285, 0.020434, -1.151650, 1.636433, 1.188841, 
1.036440, 0.686211, -0.420186, -1.188722, 1.165800, 0.269228, -1.095580, 0.975477, -0.131055, 0.880357, -0.418188, -0.159308, -0.024237, 1.685547, -0.061263, -0.631691, -2.868707, 0.931229, 1.412259, -1.641449, -2.638783, 0.436751, 0.367173, -1.436235, -1.445104, -2.042131, 2.555485, 2.337394, -2.245903, 1.240203, 0.918046, 1.153733, 1.284330, 0.361464, -3.080673, -2.015433, -1.228714, -0.329455, -0.411458, 0.501336, -0.242372, -0.311240, -0.457906, -0.264166, -1.588748, -0.373200, 0.102988, -1.740476, 0.046711, -0.766460, 0.646151, -0.115933, 1.173771, -0.387578, -0.368824, 0.337930, -1.969409, -0.603293, 0.761418, 0.375401, -0.134991, -2.419440, -0.987752, -1.550724, -1.443484, 0.387551, -1.833694, 1.399108, 0.442190, -1.677070, 0.487543, 0.320652, -1.907840, -2.211530, -0.361906, -1.273140, -0.319202, -1.474675, 0.329778, -4.529927, 0.303147, 1.402523, -0.490818, 1.104129, -1.276678, 3.073465, 0.199123, -1.776357, -2.077243, 3.452719, -1.242790, 0.465397, 0.007858, -0.324656, 0.180450, 0.232762, 0.780484, -2.317019, 2.227188, 0.324473, 0.411219, 0.811476, 2.597386, 0.243725, 0.347392, -3.275616, 0.981951, -2.699506, -1.501182, -1.494156, 0.404192, -0.352986, 0.488786, 0.572073, -0.005401, 0.975969, -0.757187, -0.536657, -1.597109, 0.619395, -1.360061, 1.888946, -1.221506, 0.641514, -0.359623, 2.311173, 0.278379, -0.678937, 0.036107, -1.858443, -1.245295, 0.234358, -0.976666, 1.461548, -0.674499, -1.413628, 0.317547, -0.356531, 0.763965, 1.204553, 1.554559, -1.456421, 2.624848, -2.279530, -0.900469, -1.071001, -0.013043, -1.226998, -3.024274, -0.869955, 0.695510, 2.545588, 0.894171, -2.687647, 0.134236, -0.433649, -1.225783, -1.425428, -2.081347, -1.657589, -0.886671, 0.438746, -0.457501, -0.734988, 1.162974, -0.482559, -0.832043, -0.858165, -2.294134, -0.019862, -0.822265, 1.362124, 1.221454, -0.332965, -0.394502, -1.896604, 0.148038, -1.534254, 2.975559, -3.707328, -4.243404, 0.972112, -1.119140, -0.895923, -3.589736, 0.002250, 0.438104, -2.746846, -0.514775, 1.392488, 0.154307, 0.787050, -0.418354, 3.335391, -0.787709, -0.280908, 0.374281, 1.434060, -0.347108, 0.099208, -2.174178, -0.603846, -1.961406, 0.674681, 3.798844, -2.831996, -0.836813, -0.447424, 0.679769, 0.221790, 0.490131, 0.028804, 1.772122, -0.108914, 0.974949, -0.489083, -0.532604, -1.453107, 0.567806, -1.318228, -2.452373, -0.822569, 2.135952, -1.244996, 0.221252, 0.031481, -2.563677, 1.483638, -0.696056, -1.030848, 0.453494, 0.258160, 0.481363, 0.410853, 0.933314, -0.757754, 1.240265, -0.048412, 3.024734, -2.914384, 1.755960, -0.113899, 1.265085, -0.000634, -2.687462, 4.921392, 0.976112, -1.478302, 1.317076, -3.834176, 1.117164, -1.640396, -1.026278, 0.920215, -0.737303, -0.399644, 0.080155, -0.742240, 1.506471, -1.287590, -0.772516, -0.405342, -1.340083, -1.341813, -0.505141, -5.087138, -2.829230, 0.037739, -0.562710, -0.333618, -0.741055, 0.571827, -2.530263, 1.743546, -1.025059, 2.990238, -1.766810, 0.032581, -5.881934, 0.320903, -0.584839, -0.173705, 1.223190, 0.873316, -0.377491, 0.358346, -1.018797, -2.016875, 1.876419, 0.198460, -0.315190, -0.175053, 0.796791, -1.651981, 1.614647, -1.253317, 1.393906, -7.832347, 0.025200, 1.087999, 0.356147, 1.559995, 1.378104, -1.098392, 3.203637, -1.381323, 0.754037, 0.470596, 1.061457, -1.721436, 0.582317, 2.470391, 0.632487, -0.780556, 1.541683, -0.279428, -0.507243, 0.097625, 1.535107, 1.662155, -3.241507, 0.258222, -0.842592, -1.014166, -1.692065, -0.952032, 0.573978, -0.080197, -0.230308, 0.503805, -0.745510, 0.399605, -1.895488, 1.306971, -0.052665, 1.025376, -1.271935, 
-1.116847, 2.998784, -3.219350, 0.237409, 2.378772, -1.603559, 3.485432, 1.610774, -0.148429, -2.102674, 0.865081, -1.310991, -0.048994, -1.506169, -0.894822, -4.080014, 1.655788, 0.798924, 1.823289, -0.862195, -0.463672, 1.421574, -3.193225, 1.129338, 2.071832, -1.163078, 1.666056, -1.420358, -1.901004, 0.628737, -2.239043, 0.337622, 0.136724, -1.480790, -0.241060, 0.154104, 1.458758, -0.181530, 1.333472, 0.651258, 0.434315, 2.803759, -2.275980, 1.517599, 0.095857, -0.277221, -1.823603, -0.043888, -0.718949, 2.178376, -0.979954, -0.734386, -0.240973, 1.171587, 0.007072, 0.237164, 0.618714, 0.416613, -1.074346, 0.910026, -0.416583, -0.977554, -0.349090, 0.335884, -0.804102, -0.217162, -1.014870, 0.661630, 0.323795, -2.940491, 0.065295, -0.096405, -0.415969, -2.336314, 1.417354, -0.325469, -3.590280, 0.277790, 0.332033, -0.174084, 1.081171, 0.342203, 1.295721, 2.129061, -0.386219, 0.250025, 1.626942, -0.799425, 0.717404, -1.062532, -1.003305, 0.141804, -0.680945, -1.261851, -0.141397, -1.268640, -0.083022, -1.416320, 1.519603, 0.234455, -1.619872, 0.154979, 0.497174, 1.281276, 0.109994, 0.098002, -3.324199, -0.159942, -0.085615, -0.569837, -0.338528, 6.693666, -0.398587, -1.605784, 2.807449, -0.692835, 1.700276, -1.787038, 3.429233, -0.245354, -2.531395, -3.319430, -1.394871, -0.941283, 0.358174, 0.097740, -0.220618, -0.389024, -0.919645, -1.610255, -1.089464, -2.378478, -0.529369, -1.008603, -0.831314, -1.461926, 0.180943, -1.451033, 0.876612, -0.984925, -0.830275, 0.209430, -1.168924, -1.927535, 1.980315, 0.165473, -0.059557, 0.572658, 1.740578, 1.783901, -0.732648, -1.389069, -0.243235, -3.893533, 0.119801, -0.287052, -0.202365, 1.593304, 0.603369, 2.476665, 0.121060, -1.646047, 0.897809, 2.111726, 1.401096, 1.891831, -0.847960, 2.102443, 0.641405, 2.511218, -0.974216, 0.308707, -0.056444, 0.295417, -3.832830, 0.944815, 0.900770, -0.782472, 0.551812, -3.032549, 3.269427, -1.041848, -0.837198, 1.905022, -0.781837, -1.386617, 4.149720, -0.836307, -1.045704, -0.542813, -0.954046, -1.242165, 1.294262, 3.201029, 0.146226, 0.916135, 1.273997, 0.238881, 1.169458, -1.391235, -0.443475, -0.352239, -1.541232, -0.628702, -3.420718, -1.154195, 0.870406, 0.426926, 1.007768, -2.553943, -1.896043, 2.682561, -0.070028, -0.579655, 0.069732, 1.805448, -1.493385, -0.014030, 1.065136, 0.721803, -0.196037, -0.116224, -5.761770, 0.321085, 0.585127, 0.342437, 0.194969, -1.084242, -2.148624, 0.218496, 0.610765, -1.830436, 0.003593, -0.647391, 0.671114, -1.094642, -3.978799, 1.761741, -3.087608, -4.422559, -0.306883, -1.370564, 1.767953, 0.557067, 0.367415, -1.202108, 0.883302, -0.572306, 2.690378, 0.457954, -0.687535, 1.898751, 2.806983, -1.539653, 0.481290, -0.449118, 0.685718, 0.252932, -1.865875, -3.047987, -0.464913, 1.844561, 1.025634, -2.673377, -1.857754, -0.710388, -3.639622, -0.550811, 0.052858, -1.624739, -0.677803, -0.077288, -0.348947, 1.134581, 0.823101, -0.623505, -2.068707, 1.280722, 4.378040, -0.646913, 3.254428, -0.911869, -0.311793, -0.135335, -0.917646, -1.206452, 0.818995, -6.080505, -0.410905, -0.240170, -1.363458, -1.410526, -0.463343, 0.588416, -2.528915, 0.003166, -2.290785, 0.747372, -1.356035, -0.436485, -0.469414, -1.004386, -0.183408, 1.013564, -0.360865, -0.062443, -0.589006, -1.419326, 0.299296, 0.893582, 0.839202, 1.844072, -1.792961, 0.155281, -0.427874, 0.656156, -3.803041, -0.734671, 1.116193, -1.512477, 0.479156, -0.151161, -1.155034, -0.197144, 0.111296, -1.950732, -0.132419, -0.841203, -0.496333, -2.204289, 0.840358, 0.668194, -0.458399, 2.389929, 1.406014, 1.096186, 
0.240972, 0.447461, -3.678923, -1.163152, 0.175436, 0.408462, 2.573723, 4.032115, 0.454848, 2.694748, 0.336364, -2.220923, -0.304247, -1.812197, -1.046371, -1.624497, 6.753428, 0.528228, 0.922608, 1.345419, -1.229388, -1.285374, -1.599376, 0.520206, 0.930075, 0.260252, 0.603340, -0.182566, 0.766121, 4.089944, 2.273662, -0.037626, 0.414319, -0.656553, 0.146951, 1.360669, -1.079636, -0.826307, 0.414756, -2.050154, -0.647629, -1.377558, -0.393630, 6.011848, 0.640889, -0.154627, -1.887521, 0.481471, 0.725237, 1.003134, -1.270733, 2.535853, -1.852875, -1.984691, 0.043005, -0.443987, -0.389437, 1.072760, 0.560111, -1.313613, -2.585411, 0.133546, 1.392839, 1.851199, 0.017208, -0.485980, 0.367607, -2.342802, 0.872469, 0.066583, -1.242599, 4.111394, -0.146476, 1.085649, -0.035312, -0.503956, -0.112943, -1.205433, -0.747308, 0.415985, -1.268356, -0.792791, 1.866075, -0.000915, -0.271667, 0.365708, -0.842554, 1.575105, -1.128135, 0.158092, -0.245247, -0.574473, 3.047494, -0.265874, 1.028757, -3.174093, 0.967400, 0.360965, 0.124362, 1.167233, 0.489035, 0.399890, -1.406982, 0.218502, -3.281218, -0.117548, -1.164914, 2.541045, 2.162634, -1.261110, 0.025405, 1.531112, -1.094499, -1.398051, -2.569325, 0.621261, -0.205139, -2.601987, 1.171064, 3.429147, 0.961047, -0.915910, 0.368733, -0.575840, -2.255553, -1.477089, 0.568685, -3.323828, -0.923796, 0.205168, 0.653205, -2.400543, -0.558132, -3.194032, 1.002985, -0.180768, -2.512737, 0.063891, -3.336560, -0.744661, 0.573810, 0.475247, -2.307733, 0.523257, 1.883903, 0.275185, -0.486428, -1.109302, 0.903212, -0.608962, -2.266825, 3.171204, -0.486808, 1.248047, 1.099221, -0.721206, -1.942247, 1.158236, 1.321511, 0.112065, -0.146534, -0.657275, -0.975966, 1.080233, 0.726018, -3.309192, 0.111219, 0.196041, -0.303238, -1.521593, -1.417296, 1.367442, 4.683889, -2.147701, -1.278427, 0.281738, 1.325304, -0.442248, -0.233044, 0.668017, -0.926213, 1.914610, -1.363387, -1.467574, 0.566218, -0.036706, 0.036098, 1.457176, -2.147704, -0.130686, 0.336216, -0.102960, -0.411079, 0.397519, 1.477163, 0.701220, -0.862012, 2.165351, 0.421311, -1.535116, 0.957023, -0.406619, -0.703398, 0.390083, -0.390960, 0.505953, -1.844770, 0.486684, 0.622634, 2.276865, 0.390006, -1.794462, 3.046761, 1.139933, -0.690681, -0.637176, 1.128005, 1.537182, -0.171056, -1.709060, 0.774255, 1.622292, -0.540113, 0.032559, -2.939101, 1.034452, -0.047643, 0.627251, -0.193845, -0.933359, -0.824693, 1.581610, -3.401405, 1.365245, 0.804797, 5.781966, -0.620613, -0.819232, 0.286532, -0.013608, -0.401111, -0.019385, -0.975068, -0.228762, -1.362910, -0.421948, -0.307311, -1.042872, -0.600724, 0.335786, 0.980055, -1.113636, -0.386080, -2.104040, -0.463928, 0.145584, -0.081313, 0.333631, 0.383729, -0.287916, -0.731525, -0.016066, 0.488491, 0.786466, -2.475577, 1.969624, -0.217052, 0.278253, -0.660953, 3.377597, 0.404897, 0.234751, -0.731012, 1.725374, -0.333617, -2.050984, -2.739315, -1.582184, -0.261262, -1.801413, -0.339414, 0.579896, 3.266277, -0.550661, 1.065212, 0.230149, -2.011817, 2.741273, -0.538543, 0.066325, -0.364490, 1.228556, 0.111360, 1.560492, 0.151007, 0.964796, 0.644581, -0.208523, 0.673280, 0.281566, -2.014031, 1.429657, -0.506738, 0.335272, -1.626906, 1.183870, -1.474985, -2.690915, 0.285946, 0.239985, -0.117515, 0.560041, -0.020173, 0.569120, 1.303762, 1.398197, 1.400377, -0.687756, -0.055215, -1.550547, -0.860882, 0.574287, -0.416834, -1.646140, -0.914618, 1.004808, -0.774086, 2.231249, 1.869094, 0.348831, -1.579520, -0.769957, -1.260929, 0.016118, -0.515597, -0.014220, -0.557759, -1.408167, 
-3.954981, -0.844895, 0.536573, 1.389662, -0.036013, 1.038449, 3.763085, -0.668687, 2.365618, -0.408353, -0.638354, -0.021301, 0.897073, 3.448795, 1.097783, -1.940816, -1.079087, 0.483942, 0.348101, 1.459333, -0.240343, -1.605376, -0.875363, 0.207903, -0.158365, -0.183384, 0.176932, -1.071853, 0.137818, 1.278738, -3.137396, 2.100088, 1.134791, -1.402452, 2.098001, -0.710614, -0.260422, 0.105859, 0.886900, -0.366901, 0.476868, -1.463733, -0.125639, -1.413581, -0.344210, -2.025344, -0.230954, 1.216566, 1.471056, 0.887452, 0.611461, -1.538164, 0.544609, -1.231650, 0.191796, -1.383449, 0.405548, 0.155041, -0.489750, -1.401300, -1.529757, 0.509945, 0.348758, -0.521110, 1.377017, 3.161858, 0.606811, 0.314406, 1.927570, -0.825270, -0.977978, 3.989907, 0.089484, 1.463825, -1.250056, -1.150727, -0.890604, 1.450158, 0.458452, -1.391975, -0.951504, 0.557115, 1.886316, -0.510700, 0.382540, 1.213111, 1.983193, 0.953116, -1.650094, 0.941780, 2.112340, 2.355280, -1.153111, -0.110312, 0.905784, -1.395765, 1.423380, -1.828381, 1.234341, -0.488266, 0.098374, 0.333164, 0.560859, -0.746528, 2.225785, -0.167920, -0.703610, 0.009235, 0.838879, 4.100902, -0.117946, -1.748979, -1.176229, -0.446607, -1.695890, -0.848669, -1.402491, 0.766224, 0.369392, -0.832074, -1.762611, 0.938566, 1.810980, 0.253713, 0.366594, -0.267112, 0.009307, -1.032587, 1.134067, -0.706014, 0.310107, -0.698892, 1.282465, 0.015429, -1.059395, 1.280026, -1.450975, -1.318240, -1.726679, -0.293316, -0.117751, 2.234800, -0.050556, 1.506430, 4.292483, 0.512186, -0.891515, -0.327962, -0.405700, 0.234422, 1.084207, 2.313412, 0.162125, 0.118381, -1.418285, -0.862138, -0.873205, 0.081308, -1.934883, -1.073094, -0.982282, -0.505499, 0.749260, -0.577122, -0.654750, 1.449021, 0.306943, -1.911140, 0.980682, -1.073094, 0.447972, -2.175428, -0.213774, 0.837256, 1.717141, -0.795926, -0.266877, -0.564491, -0.925340, -1.065870, -0.910533, 1.055523, -1.351496, -0.016869, 1.579834, 1.840015, 1.514662, 0.768153, -0.491423, -1.101287, -0.322965, 1.516147, -1.348103, 1.222862, -0.791053, 1.111427, 1.296720, 1.091699, 1.366506, 0.918218, 0.097257, 0.949652, 0.347960, -0.139513, -0.336694, -0.579173, 1.550384, -1.480493, -0.546617, 0.190767, 1.347142, 1.347428, 0.603905, 0.244730, -0.917853, -0.125428, -1.975803, -1.177999, 0.743994, 0.567265, -0.754483, 0.828847, -0.532350, -0.092983, -2.319595, 0.007637, 0.986628, 0.460779, -2.962568, 0.968618, 1.898865, -0.655724, 0.045216, -1.849113, -1.734549, 3.152958, 1.040847, -0.292312, -1.699247, -2.377090, -0.451918, 0.840069, -1.460622, 0.528579, 0.339228, 0.640661, -0.167105, 0.099803, -0.965883, 1.898624, 0.906129, -1.333380, -0.146857, 0.998675, 0.364237, -0.503700, -1.344844, 0.361299, -0.434851, 0.067926, 1.650278, -1.241045, -2.308764, -1.284775, 1.536099, -2.371135, 1.373430, -0.042336, 2.428177, -1.105025, 2.365310, 2.691403, -1.544075, -0.270095, -0.679794, -0.868711, -0.824657, -2.068143, -2.398679, -0.224238, 0.231087, -0.223123, 2.066741, -0.308522, -1.142613, -1.085934, -0.159634, 0.242321, 1.321443, 1.747885, -2.101142, 0.607077, -0.277772, -0.545982, -1.171588, -1.558956, 0.588822, 0.689636, 1.537991, 1.921086, -1.138457, -1.378528, 2.287701, -1.555898, 0.119641, -0.915154, -8.828858, 0.881397, -1.195916, -1.872201, -2.569476, -2.323321, -2.365253, -0.396521, -2.205989, -1.410965, -2.422898, 1.466792, 0.649925, 1.219864, -0.747877, -0.266348, 0.991046, 0.891695, -2.195708, 0.680926, 1.209149, -0.210787, 1.210128, 10.610557, -2.100216, -0.083745, 1.247023, 0.935504, -1.588422, 2.845443, 0.795987, -0.380745, 
2.624293, -0.053490, 2.473472, -2.868124, 1.383236, 0.711351, 1.147141, -1.749486, 1.416122, 2.846822, 0.376048, 0.323461, -0.725459, 0.639029, 0.754552, 0.453735, -0.531116, 1.227112, -1.815590, -0.198580, -0.407187, 1.864983, -1.030108, 0.005300, -0.603299, 0.221345, 0.428787, -0.543272, -0.332352, 2.984221, 0.202327, -5.485187, 0.535822, -1.468278, -3.729011, 0.606648, 1.700566, 1.987126, 0.941603, -1.677516, 1.996796, -1.032935, 1.982177, 1.363331, 3.309048, 0.001834, 0.811148, -2.019633, 0.028331, 0.866474, 0.507630, 0.226419, 0.544389, 1.711248, -0.764391, 1.076274, -1.722000, 0.033348, -1.926957, -1.266744, -0.601890, 0.482954, -1.266198, 2.610475, -1.327006, -0.667902, -1.140673, -0.099913, -1.162451, 0.423360, -1.963485, 0.139259, -0.620451, -1.460460, 0.638760, 0.416026, 1.097994, -0.513509, -1.205988, -0.172922, 0.569500, -1.262260, 0.069107, 3.552333, 2.873424, -0.936410, -0.988784, 0.112748, 0.826013, -2.480992, -1.644056, 1.300677, -1.467067, -1.655116, -0.570310, -0.814359, 0.405384, 0.770688, 1.133055, -0.133439, -1.938563, 1.494834, -0.301125, -0.387572, -0.750247, 0.541174, -1.642030, -1.938908, -0.677077, 0.318583, 0.249073, 0.524054, -3.372515, -1.696765, -0.695675, 0.689538, 1.064140, -0.246079, -0.554691, -1.488866, 0.984185, 1.027294, -0.596839, -1.093387, -3.009314, -1.978770, 1.159572, 1.168016, -0.330223, -3.284733, 1.291373, -1.436988, -0.950019, 0.965799, -0.077165, 1.572996, 1.267165, -0.229341, -0.213976, 1.009663, -2.332169, 0.985686, 1.952643, -0.464690, 0.870720, 1.139017, 4.479065, -1.564637, -2.089600, -0.682516, -0.824428, 0.295403, 1.328673, 0.951028, -0.364461, 1.950723, 0.535897, 0.285394, 0.804959, -1.240344, 0.207651, -0.463670, -0.450987, -0.680175, 1.419628, -0.364446, 1.160957, -0.477101, -0.219125, 0.160403, 0.470533, -0.444204, 0.862442, 0.198758, -1.226983, 1.495980, 0.226520, -0.723279, -1.507346, -0.466927, -1.450365, 0.764960, -1.442695, 0.182756, 0.328774, 0.886299, -0.596380, -0.203388, 1.011195, -0.766657, -1.136147, -0.374787, -0.275476, -1.058399, -3.041116, 0.434512, -1.394619, 0.625784, 1.467996, 0.442531, -1.457825, 0.209731, 1.162713, 0.467531, 3.316443, -0.461404, -0.440338, 0.912147, -0.356602, -0.213607, -0.530755, 1.978598, -0.493557, -1.528348, 1.450376, -3.712212, 1.629677, 0.314268, 1.076917, 0.172830, -0.658227, 0.078557, -0.190502, -0.809829, 1.809008, -3.190910, -3.268541, -1.825201, 1.854339, -1.493917, 1.862117, -0.853410, 0.036446, -1.382741, 0.457911, 1.435908, -0.519817, -0.551580, -1.203174, -2.070760, -0.745401, 0.554885, 0.254418, -0.210398, -0.413140, -0.294672, 0.634805, -2.582246, 2.121699, 0.268622, -1.482626, 0.379675, -0.467487, 0.169446, 1.307032, 0.580747, -1.428477, -2.967444, 0.660875, 1.832389, -1.248632, 3.130222, 0.024276, 1.051472, -0.682976, -0.317363, 1.123660, -0.709277, -1.417477, -0.986425, -0.298695, -1.003966, 0.361008, 0.986081, -0.355593, 1.545899, -0.062260, -0.144238, 0.113454, 0.347574, -1.912898, 0.454727, -0.372725, -1.981350, 0.008065, -0.947537, -2.363691, 0.655844, 1.178620, -2.531780, 2.641989, -1.082047, 0.048716, -0.560684, 2.803992, -0.401537, -0.054567, -0.589765, 1.519056, 1.457955, -0.394978, -0.203464, -1.381867, -0.570133, 1.159070, 1.272267, -0.157922, -0.373871, 0.099950, -2.147384, -0.859526, 0.580483, 1.258086, -1.811940, 0.999685, 0.498097, -0.208321, -0.355527, -1.407621, -0.985762, 0.651858, -0.482860, -2.400900, 0.738344, -0.063397, 0.446209, -0.798227, 0.203654, 0.930894, 2.156682, -2.504100, 1.246757, -0.226142, -0.051957, -2.631084, -0.153232, 1.148969, -0.016944, 
1.010929, 0.021860, -0.662591, -1.226117, 1.644994, -0.059457, -1.183619, 0.514289, 2.352145, -0.220011, 1.431658, -1.267419, 1.970766, 2.305432, 0.049004, 1.666530, 0.574792, 0.277784, 0.800149, 1.009731, 0.389355, -0.792400, 0.567103, 0.747645, -0.571798, 1.003928, 0.624220, -0.563435, 0.667803, 0.088025, -0.928825, -1.790301, 1.901877, -0.541776, 1.394022, -0.126833, 0.875269, 1.189161, 1.737956, 1.661530, 0.203104, -2.118729, 1.117041, -0.128317, -0.311372, 0.014482, 1.714613, -0.489954, 0.444651, -1.594383, 2.181497, 2.966969, 1.521034, -0.334319, 0.392132, 0.416176, 1.389801, -0.868350, -0.728446, -0.865140, 1.497067, -0.037759, -1.570476, 0.805095, 2.005113, -1.287984, 0.413729, 1.169729, -2.043537, 1.945287, 0.750359, 0.126739, -0.790029, 1.304844, 0.328200, -0.627986, -0.490445, 0.714442, -0.914642, 0.197159, -2.827106, 0.997385, 3.489973, -1.073825, 0.653735, -0.995691, 0.280912, -1.555071, 0.767915, -0.224688, 1.522421, -0.415166, -0.191174, 0.582823, -1.135045, -0.169796, 1.546901, 0.071049, 0.284257, 2.773633, -0.179967, 0.426636, 2.561610, -0.822087, -0.031908, 1.304569, -2.126102, -0.829586, 1.164391, -0.810587, 2.414972, -0.707713, 5.064760, -2.588564, -0.219488, 1.248683, -1.401357, -1.779323, -0.341826, -2.126092, 0.527902, 1.323854, -0.591890, 0.774245, -0.843360, -0.940496, -1.516691, -0.360964, -0.833856, -0.073956, 0.339857, -2.194838, -2.514215, 0.193764, 0.800907, 0.560305, 1.772881, -0.142944, -2.448337, -0.445462, -1.048851, -0.107924, -0.572082, -0.291778, -0.296814, -2.420761, -1.365350, 6.693027, -1.209041, -0.066411, 0.129719, -2.280248, 1.539942, -1.038967, -2.091413, 0.956829, 1.592810, -0.562970, 0.009215, 0.293238, 0.985026, 0.505498, 2.863027, 0.593295, 0.521377, 0.755354, -1.455579, -0.282014, 2.376681, -0.889321, -1.516596, 2.316878, 1.763739, 1.591733, 0.514772, 1.763502, 0.324903, 0.243009, -0.708746, -0.963440, 0.783172, 0.829486, 1.675332, -0.468778, 0.556697, 0.114040, -0.370741, -2.333942, 0.774582, 1.228402, 2.680446, 1.297540, -0.724778, 2.412759, 2.449544, 2.766661, -0.809107, -1.265909, -0.594859, -1.587142, 5.402306, 0.201947, 1.070469, -0.258558, 1.065963, -0.989826, 1.529247, -0.507681, -0.647435, 1.224182, 2.883728, 2.792404, -1.001071, -3.945534, 0.930196, -1.382639, 2.686813, -0.405884, 0.766733, 0.539134, -0.295801, -0.224468, 1.500429, -0.555533, -1.335520, 0.096897, -0.415765, 0.606984, 2.125623, 0.011233, -1.109010, -0.507921, 0.001881, -1.200419, 0.509064, 0.582445, 0.778606, -1.067579, 0.467620, 1.290178, -1.526835, -0.636731, -0.707597, 0.425678, -1.071439, 0.891022, 0.090920, -0.921217, 0.565343, -0.517547, -2.092988, -0.889626, 1.468542, 0.628201, -0.472464, -0.625626, 0.615259, -0.921866, -0.012971, 1.453303, 1.554652, -0.056447, -0.678958, -0.308031, -0.713133, -0.915579, -0.870202, -1.749760, 1.451643, -0.278964, -2.563250, -0.145331, -0.523267, -0.415809, 0.755618, -0.369725, -0.295338, -0.383275, -1.980165, -0.113271, -0.397523, 0.909846, -5.292087, 1.153899, -1.791563, 0.469949, 1.437734, 0.524201, -0.943633, 0.030079, 3.490268, -0.583055, 0.664409, -0.289061, -0.868615, -2.631852, 0.654866, 0.767513, 1.965486, -0.010949, 0.017394, -0.443159, 1.697919, -0.098348, 0.924185, 0.775087, 0.854382, 0.147878, -0.812742, 0.737789, 1.358073, -0.401285, -0.354132, 0.008942, -0.822113, 0.592047, 1.777928, -0.399422, 0.472083, -1.602400, 0.261083, 0.407126, -0.276465, 1.631465, -0.616176, 0.254818, 1.403592, -0.688121, -0.240744, 0.812869, -2.323307, 3.856091, -0.730178, 0.076064, 1.194804, 1.556197, 3.100048, -0.571741, 0.489173, 
-1.052799, -0.899246, 0.280042, -1.118764, -1.875434, -0.439993, -1.015147, 0.281342, -0.317847, -0.517845, -0.370270, -2.244694, 0.271209, 0.636193, 2.316612, -0.182801, 0.692837, 2.745709, -0.921832, 0.691077, 1.229511, 0.042834, -0.268188, 0.343785, -0.351455, 2.286732, 3.314201, 0.140688, -2.583246, -0.356457, -1.416211, 0.058137, -1.526017],
+	"stablelm2:latest":     [-1.653425, -4.527890, -4.877004, -3.327034, 0.434821, 1.701704, 3.526903, -4.469802, 4.630357, 0.227063, -0.190526, 1.999979, 4.078742, 0.298663, -4.610620, -6.247628, 2.257033, -4.416355, 3.772629, 4.553523, 6.998239, 2.390150, -2.438607, 0.425828, 5.858243, 3.050587, -1.468938, -3.427410, 1.221681, 3.416313, 4.253255, -5.298667, 1.102529, -3.557044, -2.505486, 5.188248, -0.989836, 0.362957, -6.202555, 0.187488, 2.013862, -5.351580, -0.717793, -0.331573, -3.788537, 3.090831, -2.763875, -1.444800, -9.508030, -3.012328, -2.278918, -1.952722, 8.289286, -1.795957, -0.417283, -5.368218, 1.483804, 0.258438, -10.793324, -0.170621, 1.341145, 3.216485, -2.294435, -11.325250, 7.499690, -0.229075, 4.033194, -2.022096, -2.066516, 3.306540, -6.398740, 4.128799, -2.652145, -4.245577, -2.925680, -1.731119, 0.467985, 5.360130, 2.190478, -2.410886, -1.811716, 0.268970, -5.523365, -1.683424, 5.559467, 0.999820, -0.455709, -4.093735, 0.959086, -2.600840, 2.411732, -0.998069, -1.692331, -2.029156, -0.735666, -3.147539, 1.299742, 0.095286, -2.520393, 5.797514, -3.167619, -8.538287, 0.815279, 6.567674, 12.046744, -7.834114, 2.572222, -8.878610, -4.626774, 1.294825, 8.148865, 10.371354, -0.124268, -3.667609, -0.973693, -1.063670, -3.439687, -6.460613, -7.701084, -2.760577, -14.492188, 0.164426, -0.574549, 6.525961, 6.609715, 2.040981, 5.896592, 4.514081, 6.597866, 1.675606, -0.119387, -2.212113, 6.467727, -1.213706, -9.323136, -0.693238, -5.086019, 2.603607, -5.502177, -4.949139, -1.244192, -2.954522, -2.206366, -2.425069, 4.410294, 1.227588, -0.394113, -2.969205, 4.004092, -3.239883, 4.231017, 4.769729, 4.615758, -7.733798, 1.271179, -2.725834, 0.671723, -3.348217, 7.956835, -12.407818, 6.270431, -3.478354, 4.388823, -3.833550, 1.732134, 5.342522, 2.818245, -1.669699, 0.843653, -0.263149, -3.813351, -0.002553, -2.829279, 2.242092, -4.844329, 4.352617, -5.227120, -0.456413, 1.326151, 3.658723, -6.018328, -0.463759, -2.318596, -2.668297, 4.961249, 0.332518, -6.405596, -0.779784, 8.303428, -0.356073, -0.476739, 5.318171, -0.729066, -1.279565, -0.168029, -2.664624, -2.886173, -3.781708, -8.054917, -2.770633, -1.900103, 1.705698, 0.827769, 3.430624, -0.810647, 0.054838, -3.096855, 3.872227, 5.814599, 1.282295, 4.651320, 1.548328, 3.391430, 5.546462, 0.517707, -5.639794, 2.234933, 2.315346, -1.303822, 2.506015, -5.120077, 1.068318, -10.178645, -1.690384, -4.059442, -4.102598, -7.739625, -1.987418, 9.097525, 4.775144, -7.737073, -5.659193, -1.349567, -2.326614, 3.676482, 2.931034, -5.102944, 1.715105, 4.865362, -4.627227, -6.429954, -1.112960, -0.858902, -1.824539, 1.637483, -3.371596, -0.620898, 4.749110, -2.166193, -0.805950, -4.718459, 1.605792, -0.939038, -4.040066, 2.002779, -5.702419, 6.565500, -0.903362, -9.009130, 3.520554, 1.029390, -3.308164, -2.322248, -2.632967, -2.356549, -5.540984, 6.991878, -1.809226, 1.707506, 1.601529, -5.887782, -5.692646, 7.399372, -4.395493, 5.871286, 9.472426, -0.359002, -2.470697, 0.435673, -0.644079, -5.350487, 1.608036, 2.127206, -3.599508, -1.087288, -3.679646, -12.158961, -1.540634, -2.502623, 0.651748, -1.322796, 3.151287, 9.182612, 2.028822, -0.812259, -3.164355, -1.381061, 2.889172, 3.109571, -1.242814, 2.625086, 4.967103, 2.446508, 4.618402, 3.096960, -1.840115, -4.565569, 1.562091, -6.668737, 0.893992, -5.300009, -4.409395, -4.273031, -1.051213, 0.975259, 1.476943, 6.330149, 4.270274, -11.452203, -0.883367, 1.148818, -3.492275, -1.791984, -2.602362, 1.316202, -1.428396, -0.278130, 4.541390, -0.525913, -2.048077, 2.950917, -8.256606, -0.619955, 
-8.175155, 6.219592, -4.559412, -5.029519, 1.302574, 2.189243, 5.993953, 1.107539, 4.641527, -2.683317, -5.585598, 1.786944, 0.677125, 0.233469, 4.144743, -0.264630, -9.546192, 1.356367, -6.896400, -0.728840, 7.910208, -0.524742, 0.772314, -13.003156, 2.716943, 5.011025, 8.707234, 5.124669, 10.650016, 2.541430, 4.903395, 1.203507, 5.391636, -1.071023, -6.836140, -2.679827, 1.561458, -5.448033, 4.679675, 3.506107, -0.709596, -0.482555, -5.619187, -4.259030, -0.654340, 5.424935, 2.510676, -0.936929, -0.446392, -0.840864, -3.871309, 0.425172, 1.525210, 0.039525, -6.836664, 1.961243, 1.491557, -1.030146, -5.348517, 0.169497, -1.356025, -2.163417, 5.469456, 0.066804, 1.929168, -3.859758, -3.962416, 0.119438, -0.293607, 2.149185, 5.052205, 1.318201, 6.010693, 4.904095, -2.518855, -4.074033, -1.353910, -5.139940, -2.222615, 8.519213, -4.054481, 2.562309, -5.009195, 2.178149, -0.613098, -1.953666, -3.135884, -0.035426, -5.914386, 6.685964, 1.986224, 3.855035, -1.448132, 1.943400, -1.810771, -4.663630, -0.055918, 3.650834, -2.328945, -0.343562, -1.699932, 3.589826, -1.778784, -3.079531, 1.289937, -3.386725, -2.422012, -0.408499, -6.427080, 3.272934, -1.943715, -2.241102, -2.410607, 4.938379, 0.192622, -1.478900, -3.973377, 0.141261, -2.835047, 4.658964, -7.517603, -1.879068, 1.348901, 1.734751, 5.640635, -4.243168, 1.005301, -9.276393, 1.285112, -5.875256, -2.468377, -0.461431, -1.482033, -0.673434, -5.684919, 3.922001, 3.804457, 3.012733, 1.698399, -5.566331, -6.048701, -4.414396, 5.351144, 3.322751, 0.896671, -4.106427, 1.860496, -0.253392, 1.277863, -7.608521, 6.061233, 5.827372, 6.431350, 2.389126, 2.007912, -3.222721, -4.483950, 0.731796, -3.598605, -3.288245, -3.541759, -2.779022, 0.147720, 2.639788, 4.095247, -2.116362, -1.391803, 1.351580, -3.107973, 2.237751, 2.010487, -1.263100, -3.238010, 8.573421, 5.392852, 3.159662, 3.357732, -0.948037, 4.364056, -2.137167, -3.219444, 1.305905, 2.827949, -2.033297, -2.093264, -5.610653, -7.001668, -2.163801, 2.376028, -4.681757, 5.156943, 1.829207, 0.746286, 2.314424, -2.283703, 0.279357, -3.448669, 2.972520, -0.545495, 0.310627, -0.852161, -7.286296, -2.648488, 0.934100, 2.379191, 5.979376, 0.192706, -3.829838, -2.468099, 0.100848, -6.053266, 5.135528, 3.334038, 1.584143, 3.258758, 5.504037, -11.328886, 0.530728, -0.574380, -1.293216, -0.792869, 0.820730, -1.782550, -0.205883, -3.883558, -1.312371, 2.503407, -0.702521, -1.633566, -1.069555, -1.567426, -0.230182, 1.784593, -0.737677, 4.364082, 2.460050, -4.924289, -6.106172, -3.849522, -6.653006, 5.791493, 5.874276, -3.370222, 3.113476, -4.759202, 3.331463, 4.267272, 0.264762, 3.669857, -6.741902, 2.898152, 6.440811, 6.956420, -0.835060, -2.215858, 0.249587, -4.716614, -0.861111, 8.428898, 4.730777, -2.566945, 5.364206, 4.415503, 1.863187, -0.282807, -3.007959, 3.352833, 2.522977, 0.141859, -3.279416, -0.489100, 6.349781, 2.915948, -8.313710, -0.765640, -4.500856, -2.149416, -1.429163, 2.745811, 3.426836, -1.849564, 3.420510, 3.182203, 6.437835, 0.332201, 1.293108, 1.917845, -1.826147, 0.560607, -4.406342, -8.274857, -7.816263, -0.885038, 3.031509, -5.290300, 1.006592, 6.433846, 0.471501, 3.736609, 5.074759, 4.981182, -3.678520, -2.021518, 10.449417, -0.564782, 1.300790, 0.502043, 12.881732, 4.540535, -4.317128, 2.051635, 0.101281, 0.239251, -5.920107, 2.336436, -0.110763, -0.277836, -0.974074, 0.063837, 4.152015, 0.325702, -2.058868, -5.299603, 5.149256, -2.832694, -1.603464, -4.600780, -4.658039, 8.158319, -3.264244, -11.934885, -4.573738, 6.673770, 1.439004, 6.602091, -2.991610, -1.654177, 3.248230, 
3.939474, -6.756430, -2.517539, -0.613864, -0.975685, -3.253090, -1.550883, 1.116012, 8.072521, 1.273055, -3.864636, 0.816139, -0.950701, -2.670830, 5.195287, -2.328566, -3.112808, -4.658459, -0.805908, -3.067221, 4.318442, 8.389666, -5.030275, 0.058931, -1.826782, 1.134502, -1.841629, -0.575503, 3.915390, 3.050701, -3.814120, 4.136434, 5.313198, -16.515984, -3.495671, 2.929760, 6.529726, 0.853020, -4.347084, 1.884411, 2.921730, 4.515150, -2.303609, 10.394044, -1.102041, -3.211182, 0.725145, -1.796741, 9.977491, -0.648789, -4.959561, 2.794793, 9.351508, 4.972434, 12.403986, -2.706991, 4.984309, 2.806412, -0.491541, 6.124185, -0.379290, -7.301085, 1.975749, -2.017454, -5.018500, -2.437666, -1.782842, 4.566490, -2.922570, -2.925570, 2.754348, 2.749041, -2.033346, -7.835915, 3.649824, 6.765293, -1.011391, 1.864064, -7.500477, -2.595671, -4.921342, 2.147914, -2.115498, -0.801049, 6.279182, -0.197686, 11.327653, -5.934701, -7.260524, -2.454565, -3.408250, 2.158854, 1.159594, -6.104372, 0.241652, 23.164333, -5.603174, 6.017294, -0.571283, -6.029321, 1.543975, 1.540088, 2.072177, 1.757015, -5.177544, -7.231556, -26.657049, 0.473235, 0.352308, 2.230681, 3.589925, -2.697782, 4.523419, -4.524393, 0.514307, -0.900148, -1.682595, 2.504208, -7.411628, 6.241665, 0.796579, -1.218847, 4.472510, -0.540995, -1.754540, -6.618201, 2.994895, -3.403845, -6.894874, 1.718728, -1.100953, 10.106449, 4.881456, 3.669082, 3.294474, 2.485058, -1.061432, -0.072556, -6.104910, -4.190408, -7.087040, -0.934537, 1.141285, -0.152905, 5.990927, 21.803249, 3.692633, 0.646055, -15.504597, -1.677958, 2.374717, 5.231987, -0.374639, -5.891757, -9.832679, 0.603590, 1.253641, 8.749701, 2.263827, -1.956822, -0.234216, -0.836304, 1.517358, 1.591627, 4.201532, -2.462435, -2.727039, 1.439295, 3.413836, 6.225077, -2.572009, -9.178096, 0.580842, 8.541307, -7.947644, -2.963300, 1.793434, 4.577036, -3.500894, 1.535902, -8.535811, -3.740390, 6.137311, -2.559419, 2.890654, -3.380768, -2.330120, 2.918745, 2.383862, -2.060155, 1.042335, 2.117087, 4.053154, -2.273367, 2.529394, -1.717983, 7.215990, -4.043588, -1.948729, 1.058707, -3.974885, -0.589312, -2.932399, 2.257031, -4.331958, -0.382463, 2.482601, -5.368923, 3.443856, 7.799732, -4.793459, -2.265096, -2.682369, 11.392784, -8.612529, 6.155714, 1.332527, -6.299338, 4.072433, 3.686455, 5.212699, -1.562696, 3.388870, 4.463029, -1.301285, 0.617910, 4.544553, 0.401876, -4.455338, 4.778875, 8.848549, 0.613039, -3.292335, -10.968876, 0.494941, -1.651135, -4.843451, 1.116010, 0.260165, 3.420653, 3.682390, 8.025309, 4.441313, -4.517295, -4.679897, -3.862373, -0.206641, -2.444820, -0.828062, -0.811257, -1.055606, -4.862272, 1.180074, -4.215420, -6.076351, 9.779222, 5.339531, 4.378951, 2.873308, -0.683237, -1.936983, 5.560632, -1.555797, -0.726255, -2.257168, -3.157807, -4.559538, -2.570522, 4.684027, -7.325606, -3.542253, 0.778399, 0.183623, -3.961788, -0.930686, 3.844731, -3.814492, -4.890561, -1.906883, 3.408762, 7.770661, -1.412129, 6.053822, 3.116910, -2.239404, -4.743374, -3.551391, -5.377923, 2.254163, 7.574185, -1.413529, -3.561156, 2.421036, -1.718103, 4.480564, -6.611534, -5.721527, -0.189614, -5.132884, -1.187861, -3.024539, 1.856411, 5.378809, 0.367076, 2.214311, 4.217255, -2.399374, -8.449114, -3.636938, -6.007771, -6.751559, -3.842293, -2.011700, 0.578463, 2.428024, -6.291073, 8.905704, -1.470243, -0.017971, 3.929838, -4.127206, 4.167617, -1.194076, 2.293453, -1.130347, 2.758235, 0.314015, 4.531409, -1.481377, 1.207142, 6.048965, 2.260943, 13.881939, -0.718647, 0.488885, -0.471996, 
1.053957, 7.320283, 0.283981, -8.531932, -3.530199, 4.011388, -0.629390, -1.018878, 1.265667, -2.407388, -4.780603, -2.249164, -0.449141, -3.792957, 2.530940, 3.387525, -2.545771, -4.046836, 2.087038, -0.254846, 1.675272, 1.858252, -3.493028, -0.658172, -2.717700, -9.038601, -2.023740, 2.185417, -5.424749, -1.712286, 3.890235, 10.073957, -7.019946, 0.662037, -5.308875, -5.676610, -4.722646, -2.664748, -1.777658, -0.848570, 0.390657, -0.070973, 0.944846, 7.873178, -0.824468, 7.144648, 5.662151, -3.808916, 13.441382, -6.540015, 5.588078, -9.888226, 0.829120, 0.493990, 2.685220, -2.146703, 33.005726, -9.920277, -2.047557, -4.809439, -1.058374, -6.111153, -3.855006, 3.898750, 3.120060, 2.897796, 4.638179, 0.339269, 4.206357, 34.924412, 1.550849, -0.893083, 7.823601, -0.931050, -2.969501, 3.311508, -4.605260, 0.211484, -1.090347, -11.311889, 5.567627, 2.470628, -3.050094, -6.833628, 0.027669, 3.954453, 0.462904, 1.221832, -3.357898, -0.372250, -3.285368, -2.702124, -1.365660, -2.935486, -0.653691, 0.267228, -1.761839, -1.757823, -7.618964, -4.954926, -3.058721, 4.318247, -1.149615, -1.913772, -6.029979, 7.433734, -5.322990, 5.067607, 0.097438, 6.688019, 3.424113, -0.951026, -2.514448, 4.473936, -0.328897, 0.472957, -8.968553, 4.915695, 7.202985, -1.535644, 1.273114, 4.096374, 1.467003, 4.575773, -2.753911, -2.426697, 2.021526, 2.448324, 1.601018, -4.054155, 5.143868, 2.001997, 1.193890, 6.219207, 6.874357, -5.902385, 1.215700, -2.638806, -0.412928, -5.336220, -5.211881, -3.786383, 2.493112, 1.445182, -0.932489, 3.669335, 2.494090, -5.945648, 1.461121, -4.588869, -9.683293, 0.024478, 0.361819, 0.429738, -0.528489, 0.071427, -1.812616, 2.015117, 2.611473, -1.552477, 1.188317, -3.766112, -8.125200, 5.595769, 7.144753, 0.440344, -3.455775, -4.126338, -0.137749, 1.485938, 4.267346, -2.552121, 5.409408, -2.956439, 0.116498, 10.794524, 2.871317, 6.700639, 3.647399, 0.462681, 0.919991, -1.771236, 4.206169, 3.297713, -0.490547, -5.456108, 3.810536, 0.439151, -5.683705, -0.331633, 9.695666, 6.752079, 1.894693, -2.299804, 0.911242, 1.596185, 1.820296, -10.136162, 2.699797, -0.861554, 1.563863, -1.445469, -1.585015, -0.936888, -2.816169, -7.779940, -2.266987, -2.636140, -0.401760, 0.616410, -3.814011, -1.176419, 11.211123, -9.366578, -1.591179, -0.927374, -0.874337, -4.743129, 2.231917, -6.397682, 2.209341, 5.533977, -0.850753, -1.956051, 0.684888, -1.229650, -5.023637, 2.315219, 3.824482, 7.081928, 5.981342, -5.582659, -0.983008, 2.805578, 3.657308, -1.696202, 6.763514, -5.138957, -3.832808, -7.314121, 0.053174, 2.861154, 3.639174, 4.679485, -2.298343, -11.186035, -2.610517, -0.211195, -0.640446, 0.348537, 1.758054, 5.264797, -1.193833, -0.879477, -1.257836, 2.351809, -2.727529, -6.421523, -0.637253, 1.358360, -0.497932, 2.984409, -5.369983, 0.237885, 2.455053, -3.368424, -2.715488, 1.050363, 1.030069, -5.754048, -1.451195, 2.429502, 3.444857, 3.233912, 5.911255, 5.225487, 1.088152, -1.966296, -0.698468, 5.785910, 5.211907, -1.712197, 0.683985, 7.677290, -2.134921, -0.079519, 6.433024, 3.194132, 2.521041, 0.270111, -0.026705, 2.443631, -7.762458, -0.880217, -1.432392, -0.296257, 1.004292, -2.112697, 0.410646, -7.520607, -5.301806, -6.393757, -2.312966, 8.617706, 5.257979, 2.469394, 1.896033, -7.304307, 0.032064, -1.680473, -1.370388, 1.836908, 1.746221, -4.757438, -1.365277, -3.207575, 2.679633, -1.870035, -4.846527, 9.144346, -0.177452, -1.721346, -4.960502, 14.192567, -2.215825, -4.418038, 4.921060, -4.274836, 0.196901, 3.359635, 1.766395, 6.591709, 1.311870, 3.836223, 3.283481, -5.704985, -5.065734, 
-10.232748, 1.326910, 10.367500, 0.266378, 1.245203, 3.728586, 0.871024, -4.337420, 3.950523, 1.484714, 7.130711, 2.444842, 3.370905, -3.053072, -8.794883, 0.811756, -6.805107, -1.288338, -3.038558, -1.800780, 5.653497, -7.699387, 4.797431, -1.204077, 3.131383, 0.218491, 0.271107, 0.328277, -3.661891, -3.891054, -2.775194, -2.972222, 1.638716, 2.121798, 11.757519, 1.406214, -4.839445, 2.654708, -4.238147, 6.563388, 3.513842, 1.695825, -3.579199, 0.144067, -3.779405, -3.784275, 4.610184, 8.391285, 3.869660, 14.877264, -0.853013, 0.816489, 4.322730, 1.468392, -10.050632, -6.104123, 1.944591, -0.779614, 4.052341, 4.706254, 1.130013, 3.297864, -2.842252, -1.799122, -7.300198, -8.223158, 0.103784, -0.536769, -0.178908, -3.648121, -7.089283, 5.717216, 1.748043, 2.195454, -0.802349, -6.516545, 2.740543, -7.503774, -1.026417, 2.788604, 1.603290, -12.695382, 8.187833, -26.062899, 0.796168, 4.796346, 1.615061, -1.498737, 1.886548, -0.707566, 4.348446, 7.479768, 0.241852, 0.702221, 7.663441, -8.665356, 1.352373, 3.487139, -1.783125, -1.789621, -1.968932, 0.433356, 3.286088, -0.605302, -4.334078, 3.792793, -5.000621, -3.242608, 12.834573, -0.729511, -2.675239, -11.013935, 3.448772, 7.774179, 7.802071, -5.316556, 3.057752, -4.227582, 8.020033, 3.920683, -1.241657, 1.266449, -0.277056, -3.271275, 0.191526, 0.863023, 1.282676, 6.873929, 3.656773, -4.713717, 4.149897, -3.846729, -0.674714, 0.938713, -5.917807, -4.432909, 12.985623, 4.821611, -5.643327, -8.125957, -0.826768, 0.629284, -3.502351, -2.436090, 4.174616, -0.633892, 1.566435, -0.910233, -3.207411, 4.179739, -0.570571, -7.840557, -1.772896, 0.870700, -1.275693, 1.374926, -0.907516, -4.510747, -3.468122, 0.839771, -3.056328, 3.745919, 1.016371, -2.341292, 2.988811, 1.466320, -4.544544, -1.112260, -2.121435, -0.762460, 12.233142, 1.429840, -1.525554, 3.081838, -3.317194, -1.130550, -2.559003, -1.026362, -5.037518, 2.794155, -8.199506, -4.733481, 0.113747, 5.813182, -18.165855, -3.891802, 4.974901, 1.006614, -9.837774, -6.438626, -6.679924, 0.494078, 2.243184, -6.200881, 2.836529, -0.610269, -3.830256, -7.266005, 1.085588, -3.045987, -2.564099, 3.038881, 0.339688, 0.001306, 1.242169, 0.134139, 0.602918, -0.701311, -3.369164, -0.690272, 4.710167, -4.428005, -0.421637, -2.355613, 1.473268, 1.529589, -5.259464, -2.180897, -4.329997, -5.442777, 4.491558, 8.333999, 0.233608, -3.291049, -6.142815, 4.169403, -5.941311, 1.491574, 1.647497, -4.800206, 1.673341, 5.559244, 6.246997, 1.889692, 2.633429, 4.055699, 8.010838, 3.966914, 0.671864, 2.492251, 1.159096, -5.895726, 1.409254, 4.707012, -3.720183, 2.499872, -10.549520, 0.548011, 3.479353, -1.914070, -4.064875, 8.711893, 6.612379, -2.074630, 3.438460, -1.055158, -6.425396, -2.817565, -0.598246, -1.445648, -3.299649, -8.691216, -1.171440, -1.482286, 6.319604, -0.608533, 0.551999, 2.872281, 1.462533, 3.835391, -7.284638, -2.038612, -4.394158, -7.662087, 1.181028, 3.206617, -3.588524, -3.229356, 2.175489, -2.026885, -0.381162, -3.029563, -8.150112, -1.667152, 3.350580, -2.530387, -0.431616, 8.152472, -10.510705, -6.262432, 1.664065, 3.685207, 1.542331, 1.176850, 2.727885, 2.490125, 5.457391, -0.308251, 2.646002, -5.066769, -6.901775, 2.318301, 4.163167, -0.442582, -3.884129, 0.530555, -0.963692, 3.085561, -11.314219, 6.204105, -3.639430, -3.290856, -2.991646, 3.467474, 7.838357, -4.172731, 7.366910, 1.738384, 0.128589, -0.141192, 1.608539, -0.555499, -1.099028, 3.368660, 0.252946, -3.633769, 2.855535, -4.066776, 2.204027, 11.876217, -1.471280, -3.263006, 9.681908, 3.866625, 2.398274, 1.791172, 1.211074, 
-5.895164, 0.699426, 13.719752, 3.616782, -6.157663, -9.184650, 6.519058, 3.153225, 4.643545, -5.599307, 0.254889, -3.117306, 3.102524, -0.361742, -0.260251, -3.457507, -1.648794, 2.501627, 3.397187, -7.770206, -0.318884, -2.265900, 6.043713, 9.379510, 3.545226, 6.332065, 0.347569, -2.060041, -1.962089, 1.544432, -4.882979, -0.862780, 1.854207, -2.820076, 7.028728, 3.497764, -0.441711, -0.910925, 1.264082, 0.343511, 8.315704, -7.834203, 12.308478, 0.415701, 3.429908, -2.472554, -9.694130, -5.292675, -0.149351, -1.371549, 4.333561, -0.579647, -1.422889, -3.249985, 7.131853, -0.878708, -4.499852, -5.401291, -0.561697, 5.849093, -10.702834, -4.518972, 0.102368, -1.098199, -8.463142, 4.227396, 1.787177, -3.223487, 3.757309, -2.557846, 1.850068, 2.502924, -1.886266, 3.977140, 0.390536, 6.199437, -0.839144, -3.026297, 9.601433, -5.108625, -7.475008, 5.318777, -0.886975, -4.307914, -13.300366, 2.354438, -4.161621, -0.152817, -7.119799, -2.522202, 2.405219, -1.703956, -1.918706, 1.123279, -1.686906, -13.981764, 1.260341, 5.348891, 2.944733, 4.517666, -0.222268, -0.224630, 0.870396, 0.655100, 4.191898, -2.788587, 5.468477, 0.407466, 9.296951, -6.126083, -0.179773, 0.970539, -0.705487, -6.364262, -4.497518, 4.540085, 0.094961, -4.489572, 2.402131, -5.849391, 1.395253, 0.655383, 10.144835, 5.867474, -1.108069, -1.975885, 3.721664, 2.627710, -2.964183, 0.887506, 1.555636, -6.907562, 2.927052, 10.168860, 4.192670, -1.880741, -6.589525, 0.641212, -3.735144, 1.842830, 3.282881, -0.562061, -5.831023, 1.471113, -2.658396, -4.187063, -8.363999, 4.956432, -4.024732, -0.800226, 0.528853, -0.258082, 10.271538, -1.794757, 4.264963, -3.756393, 0.068837, 2.209610, 7.479410, -2.213324, -0.446097, -1.402409, 6.209837, -3.888666, 3.499879, 5.169998, -1.028717, 0.514249, -2.074459, -1.518194, -3.565759, -4.638311, -0.359604, 1.501896, 1.285335, 5.767209, 2.073330, -3.042041, 2.643086, -1.890087, 0.516425, -0.564762, 6.343996, -0.687559, -5.105609, -1.004164, 5.515174, -0.558512, -3.221458, -3.600777, 4.806634, -3.949126, -3.070909, 4.943271, -7.664543, -0.072120, -5.741046, -0.178641, 8.156539, 6.247648, 0.371558, -4.386340, 1.028563, -0.193440, 3.827970, -2.015718, -0.128680, 1.226461, 8.194599, -10.498024, -4.354074, 4.413433, 7.957554, -9.281338, 2.760691, 0.985617, 5.863577, -1.442804, 3.142987, 0.343291, 4.878276, -2.696698, 1.107740, -1.036489, -5.530350, 0.732239, -2.805647, 2.660924, -2.471739, 0.074897, -7.086133, -4.656291, -3.232314, 7.814073, -3.493585, -5.844809, 11.378303, -3.959454, 2.498766, 4.457907, 1.597166, -0.140635, -4.411887, -0.654702, -1.143033, -2.344702, -7.219950, -2.728144, 3.015845, -3.185435, -0.887260, 1.874124, 5.364367, -0.202549, 2.762683, 6.488287, 6.185477, -3.207197, -0.253945, -1.326026, -1.671279, 3.101617, -3.714328, -4.822799, -7.220240, 5.298695, -6.139416, -3.011972, -4.737096, 2.741168, 2.762054, 2.917991, -5.666528, -4.641473, -1.250757, -5.046134, -10.761831, -1.727711, 3.504439, -0.361030, -3.113671, 3.207125, -4.755842, 3.955856, 3.747915, -4.405138, -3.959960, -5.588688, 12.236965, -3.121061, -2.896829, 4.571880, -9.121741, 6.443601, 6.203311, 2.311942, 3.679552, 4.453959, -0.677486, 4.320968, 3.576111, 2.108275, 1.293650, -0.374192, 3.017422, -1.509517, -3.192483, 0.019659, -4.668540, -8.395880, 3.959239, -6.751495, -9.315059, 2.641012, -5.815385, 1.776897, 0.764546, 0.657157, -2.862101, 1.956879, -1.389284, 6.959363, -1.612058, 0.148994, -3.402942, 0.557323, -1.576211, 0.613227, 0.059131, 0.110311, -5.456705, -2.389134, 0.064466, 4.863774, -1.120738, -5.267430, 
1.526949, -2.258410, -4.011267, -7.015226, 9.352161, 3.459421, 1.011206, 1.277637, -0.117204, 0.035243, 5.753888, -0.467889, 0.203132, 0.377401, 6.997046, 4.553281, -0.652787, -1.176425, -3.859581, 2.546398, -10.185781, 4.840391, 0.853133, -4.606481, 0.760895, -1.668669, -0.353953, -1.965285, 1.187143, 3.873412, 1.089695],
+	"falcon:latest":        [-0.145434, 0.551458, 0.579686, 0.056737, 0.122334, -0.098554, 0.453500, 0.152636, 0.006995, 0.353793, -0.057853, 0.141001, -0.251169, 0.123486, -0.386756, 0.380073, 0.113592, 0.246952, 0.221665, 0.222362, 0.110978, -0.156950, -0.095453, -0.446011, 0.380302, -0.181973, -0.355711, 0.083788, 0.529327, 0.396823, -0.235883, 0.463392, -0.453481, 0.111083, -0.109795, 0.096437, 0.184561, -0.006152, -0.404893, -0.505164, -0.512443, -0.248063, 0.264552, 0.211518, -0.408603, -0.177284, -0.358847, 0.055804, 0.302788, -0.161492, -0.030894, -0.017345, -32.815334, -0.083117, -0.250543, 0.163018, 0.198646, -0.159876, -0.472978, -0.341143, -0.634069, 0.191124, -0.064307, 0.090823, -0.458379, 0.471753, -0.101117, 0.290473, 0.348141, 0.077560, 0.459839, 0.713714, -0.385911, 0.357724, -0.028543, 0.215573, 0.420039, -0.160133, 0.720215, 0.377099, 0.286783, 0.130918, -78.763008, 0.235381, 0.492549, 0.421542, 0.292254, -0.320543, 0.125360, 0.286009, -0.266907, -0.161205, 0.468458, 0.075248, -0.607871, -0.498977, -0.248155, 0.137575, 0.217211, 0.167880, 0.721968, 0.437299, -0.006375, -0.114837, -0.217905, 0.325793, 0.548343, -0.304491, 0.050522, -0.296999, -0.015102, -0.066116, 0.326557, -0.683608, 0.848642, 0.139344, -0.386741, 0.095375, 0.137059, -0.524803, -0.308266, 0.207935, 0.910517, -0.047032, 0.016144, -0.282897, 0.030230, -0.118328, -0.216214, -0.303300, 0.196013, -0.501000, 0.369998, -0.028593, 0.309059, 0.496592, -0.330565, -0.581806, 0.117767, 0.187372, -0.144573, -0.227210, 0.181474, -0.181403, 0.144057, 0.269330, 0.046298, -0.699166, 0.538736, 0.174926, -0.561055, -0.088680, -0.007410, 0.730400, -0.471273, -0.197240, -0.027064, -0.109930, 0.149596, -0.431747, -0.033780, 0.269468, 0.410048, -0.164335, 0.079866, -0.167682, -0.260156, 0.352913, 0.227972, -0.491290, -0.305480, -0.233817, 0.048377, -0.192452, 76.728951, -0.184723, -0.035661, 0.021605, -0.363392, -0.380687, -0.095017, -0.363713, -0.331915, -0.450960, -0.384010, 98.522270, -0.376337, 0.332839, -0.049045, 0.044364, -0.076163, -0.176768, 0.273568, 0.060393, 0.105446, 0.286757, -0.061164, -0.454434, 0.415012, -0.140746, -0.040113, -0.565394, -0.060276, -0.234516, 0.312736, -0.069791, 0.352157, 0.496571, -0.248295, 0.169454, 0.254744, -0.125044, 0.095099, 0.158710, -0.174195, 0.307172, 0.600827, 0.102560, 0.584701, 0.191773, -0.210491, 0.224133, -0.133474, 0.060811, -0.040503, -0.486995, -0.257410, 0.131609, 0.188029, -0.003900, -0.226279, 0.014537, -0.214499, -0.285907, -0.500149, 0.220990, -0.061304, -0.449743, 0.320928, 0.395599, -0.405548, 0.214390, 0.296444, 0.175481, 0.196306, -0.181649, -0.175221, 0.562277, -0.083596, -0.135095, -0.226197, 0.060174, 0.425703, -0.263677, -0.442736, -0.848780, 0.140745, 0.158711, -0.246688, 0.513777, -0.133425, 0.150256, -0.165640, -0.204918, -0.390736, -0.400310, 0.078730, 0.042572, 0.127664, 0.146947, -0.031153, -0.076668, 0.239822, -0.114565, 0.338568, 0.057678, 0.276141, -0.158014, -0.047334, 0.813145, 0.096568, -0.204652, 0.069818, -0.162866, 0.474288, 0.010297, -0.042806, 0.708678, 0.266313, 0.264811, 0.178802, 0.090678, -0.153934, -0.245041, -0.249606, -0.606384, 1.116905, -0.250234, -0.134183, 0.109237, 0.162539, 0.071664, -0.028506, 0.256520, -0.270116, 0.370725, 0.072267, 0.595097, -0.444489, -0.306348, 0.375055, 0.052390, 0.689186, 0.141866, -0.022982, -0.028208, 0.692657, 0.168249, -0.289483, -0.068967, -0.145280, -0.667039, 0.606857, 0.572388, -0.461176, -0.083282, 0.357757, 0.263421, -0.338740, 0.014035, 0.136918, -0.027329, 0.405532, 0.123614, 0.228298, -0.604249, 
-0.259012, 0.191833, -0.188005, 0.052887, -0.036500, 0.051033, -0.017828, -0.013952, -0.076532, -0.322442, 0.034422, -0.158676, -0.380441, 0.068665, 37.625778, -1.467730, -0.198724, -0.366973, 0.094293, -0.008216, -0.019456, 0.045300, 0.241962, 0.206565, 0.200205, -0.170838, -0.239032, -0.092126, 0.364009, 0.078817, 0.044788, 0.307262, -0.396150, 0.345320, -0.246881, -0.092816, -0.252418, -0.180483, -0.254689, 0.004093, -0.200098, 0.210746, 0.029551, 0.245714, -0.063405, -0.003852, -0.331452, 0.648124, -0.156812, 0.173493, -0.454535, -0.054677, 0.309534, -0.462142, 0.451003, -0.153110, 0.369131, 0.301209, 0.173910, -0.131474, -0.194955, -0.717491, -0.016214, 0.543446, 0.239048, -0.154631, 0.153298, -0.721207, 0.015260, -0.547927, -0.272240, -1.158423, -0.387883, 0.179015, 0.290848, -0.178274, -0.537356, 0.123705, 0.337805, -0.196415, -0.124260, 0.274265, -0.027145, -0.112975, 0.271123, 0.200173, 0.309503, 0.368623, -0.130785, 0.145833, -0.102706, -0.194807, -0.355944, 0.233473, -0.422869, -0.249461, 0.275593, 0.261122, -0.041622, 0.321849, 0.351184, -0.731175, -0.455722, 0.376331, -0.073822, -0.400349, 0.297684, -0.254106, -0.179405, -0.740918, 0.396933, -0.624790, -0.245724, -0.758950, -0.761523, 0.171997, 0.582886, 0.122688, 0.454277, 0.393659, -0.125691, 0.526288, 0.245751, 0.236661, -0.268370, 0.259706, 0.471990, 0.070545, 0.262786, -0.195632, -0.373156, 0.233073, -0.319479, -0.180365, -0.356844, 0.501312, -0.404738, -0.253011, 0.381910, -0.230624, 0.322234, 0.322426, -0.406919, 0.278409, 0.189779, -0.477614, 0.398916, 0.282304, -0.097129, -0.206756, 0.078960, -0.140931, -0.327043, -0.043203, -0.238777, 0.054524, 0.345428, 0.379250, -0.028527, -0.346552, 0.135533, -0.324663, 0.097073, 0.405679, 0.144856, 0.203372, -0.135059, -0.262663, -0.038000, 0.389909, -0.832866, -0.061717, 0.439580, -0.620867, 0.043219, -0.032121, 0.448644, -0.445477, 0.479233, -0.349839, 0.376309, 0.034940, -0.093427, -0.018762, 0.237700, -0.175609, 0.521672, 0.212652, -0.147365, -0.611912, 0.433886, -0.218107, 0.044754, 0.199969, -0.084820, -0.180790, 0.020706, -0.192604, 0.911501, -0.205838, -0.328527, -0.468358, -0.292061, -0.165838, -0.239964, 0.134404, -0.085968, 0.593265, -0.035909, 0.166329, -0.038361, -0.180849, 0.134523, -0.347261, -0.383150, 0.203595, -0.527209, 0.655002, 0.036892, -0.271854, -0.722635, -0.183820, 0.041028, 0.534431, -0.321399, -0.418089, 0.205068, 0.079445, -0.313515, 0.291001, 0.376464, -0.024004, 0.494628, 0.302450, 0.075265, -0.267347, 0.131573, -0.295871, 0.160672, -0.115069, 0.890312, -0.041931, 0.229802, 0.140957, -0.566682, 0.494251, 0.228695, 0.119960, -0.250476, 0.542480, 0.438343, -0.089255, 0.539449, -0.477021, 0.099634, -0.404685, 0.306461, -0.043212, 0.233839, 0.048056, -0.186367, 0.199987, 0.047861, -0.289694, -0.537992, -0.030393, -0.089514, 0.143944, 0.128985, -0.247290, -0.148287, -0.007974, 0.302635, -0.353074, 0.140356, 0.267783, -0.438974, 0.037830, 0.136492, 0.285023, -0.161996, -0.577940, 0.466486, 0.099931, -0.207497, 0.478511, -0.477054, 0.378640, 0.114531, -0.454839, 0.205362, -0.264606, -0.345706, -0.495824, 0.015153, 0.063968, 0.570447, 0.097192, -0.171304, 0.976958, -0.167555, 0.102738, -0.436587, -0.416574, 0.246487, -0.708184, 0.023123, 0.282953, -0.282671, -0.100751, 0.016980, 0.458428, 0.210551, 0.728111, -0.064927, -0.518779, 0.023455, -0.523534, -0.352629, -0.204977, -0.031492, 0.492380, 0.037194, 0.275549, 0.049998, -0.042273, 0.024827, -0.391516, -0.194958, -0.433102, -0.268930, -0.385709, 0.035979, -0.236354, 0.036609, -0.106188, -0.051624, 
0.874279, -0.019348, 0.500591, -0.124548, 0.324765, -0.367921, -0.222806, 0.540817, -0.078370, 0.043867, 0.420149, -0.059615, -0.440197, -0.087696, -0.292653, 0.111633, 0.670683, 0.455422, 0.189336, -0.042270, -0.260230, 0.051045, -0.335735, -0.506230, -0.358802, -0.266131, -0.203469, 0.257531, -0.079949, -0.371272, -0.413529, 0.099179, 0.181491, 0.150417, 0.225561, -0.346218, 0.386289, -0.052629, 0.298301, 0.905319, 0.350629, 0.162920, -0.197367, 0.032053, 0.451291, 0.156002, 0.093688, -0.061183, -0.513632, 0.197035, -0.259313, 0.299369, -0.044861, -0.141547, -0.285648, 0.202531, 0.323211, -0.128848, 0.146046, -0.771048, 0.315781, 0.132139, -0.224002, -0.046500, 0.392186, 0.015927, -0.539539, -0.217051, -0.165655, -0.112836, -0.694149, 0.339086, -0.414199, -0.237005, -0.950271, -0.279200, -0.627571, 0.069664, 0.370548, -0.062389, 0.586872, 0.213491, 0.115808, -0.041785, 0.070284, 0.475451, 0.620675, 0.153083, 0.570752, 0.077146, -0.593140, -0.395619, -0.169177, -0.077108, 0.033796, 0.110675, -0.144388, -0.484813, 0.101560, 0.118790, 0.585920, -0.051071, 0.522129, -0.096738, 0.039210, 0.130928, -0.165276, 0.021268, -0.138029, 0.452325, 0.340640, -0.240428, 0.037821, -0.356228, 0.230344, -0.362754, -0.077494, 0.106675, -0.458798, -0.373419, 0.031627, -0.154582, 0.372776, -0.041724, 0.265408, 0.351702, -0.294240, 0.822546, 0.024259, 0.029190, 0.116972, -0.298376, -0.022028, 0.036926, -0.000887, 0.198864, -0.196297, -0.319581, 0.104491, -0.003430, 0.157759, 0.118287, 0.417707, 0.329480, 0.477700, 0.300714, -0.037461, -0.082834, -0.691333, 0.119406, 0.333651, -0.015797, -0.241354, 0.322229, -0.316640, 0.336315, -0.333795, 0.152463, -0.063831, -0.093148, -0.190586, 0.866467, 0.206135, -0.333121, -0.334417, 0.427795, 0.433384, 0.030511, -0.081860, -0.327655, 0.321278, 0.308877, -0.146296, 0.250392, 0.352685, -0.261779, -0.391084, 0.343821, -0.623625, -0.232097, 0.730628, -0.009076, -0.278967, -0.024484, 0.126125, -0.272171, 0.395506, 0.385768, -0.485491, 0.060047, -0.462739, -0.056647, -0.384519, -0.158643, -0.362551, 0.475326, -0.029004, -0.547719, 0.048068, 0.214930, -0.050708, -0.737851, 0.249612, -0.273211, 0.547692, -0.119554, -0.489587, -0.183487, -0.422387, -0.021538, -0.357277, 0.442413, 0.742838, -0.306633, -0.417351, 0.665328, 0.195444, -0.137114, -0.503417, 0.106885, 0.313572, -0.132145, 0.361419, 0.209603, -0.503253, -0.481884, 0.218992, -0.205752, -0.239932, -0.071110, 0.198684, 0.044104, 0.170742, 0.087633, 0.306695, 0.128361, -0.694840, -0.178137, -0.062519, 0.269586, -0.420635, 0.441342, -0.307177, -0.113335, 0.545861, 0.245667, -0.061284, 0.463628, 0.188273, -0.425786, -0.599940, -0.498917, -0.097796, 0.110285, 0.832950, -0.219720, 0.157499, 0.090185, 0.919885, -0.057676, 0.432318, -0.276865, 0.225659, -1.100891, 0.191885, -0.538571, -0.070240, 0.178796, -0.850664, -0.281632, 0.101154, 0.292607, -0.697670, 0.065382, 0.243841, -0.446409, -0.747985, 0.421588, 0.134048, 0.014038, 0.012480, 0.093830, 0.065262, -0.028793, -0.460956, 0.001223, -0.435688, -0.216765, 0.092236, -0.205991, -0.346419, -0.150538, -0.115917, -0.034806, 0.121649, -0.155723, 0.509418, 0.161882, -0.247912, -1.040411, 0.113688, -0.232234, 0.083722, -0.356446, 0.171696, -0.551805, 0.644128, -0.361454, -0.069536, 0.344402, -0.144846, 0.593874, 0.046123, 0.071060, 0.044171, 0.109494, 0.162099, 0.016161, 0.160383, -0.657956, 0.426369, 0.430661, 0.167817, 0.371546, 0.038148, 0.175910, 0.308253, -0.379426, 0.308540, 0.028549, -0.238940, -0.216455, 0.583075, -0.325968, 0.251503, 0.521912, 0.405485, -0.058058, 
-0.085051, 0.486884, 0.177756, 0.437350, -0.531478, -0.309321, -0.053716, -0.577053, -0.135426, 0.037255, -0.151116, -0.249868, 0.044155, -0.143193, -0.256241, -0.094862, -0.071529, -0.179807, 0.177920, 0.322324, -0.038461, 0.146140, -0.170251, -0.189690, 0.621307, 0.123934, 0.030048, -0.024102, 0.172103, 0.412975, -0.478826, -0.629470, 0.323721, -0.306728, -0.006467, 0.321414, 0.400523, -0.001823, 0.011803, -0.120583, 0.601702, 0.185840, 0.700491, 0.149545, 0.067835, 0.470991, 0.664574, -0.552964, -0.316984, -0.231059, -0.300164, -0.558253, -0.371432, -0.427449, 0.150378, 0.205915, 0.007517, 0.848489, -0.270204, 0.645367, -0.190823, 0.118916, -0.048641, -0.555063, 0.180961, 0.193901, 0.520572, -0.087776, 0.935664, -0.117911, 0.409115, -0.214746, -0.062976, -0.582564, -0.081324, -0.010914, 0.697559, 0.827275, -0.194161, -0.525253, 0.123935, 0.110752, -0.243882, -0.464774, 0.015078, -0.350227, -0.113486, 0.011749, 0.153878, 0.241713, 0.309567, 0.159847, 0.382380, -0.121834, 0.159451, -0.704771, 0.006205, 0.008906, 0.346081, -0.160726, 0.367822, -0.023612, -0.010030, 0.374448, -0.019864, -0.096305, -0.143692, 0.101554, 0.203875, -0.122353, -0.456235, 0.368918, -0.026021, 0.096329, -0.418561, -0.096026, -0.323244, -0.289808, -0.177779, -0.380370, -0.277077, 0.225156, 0.373779, 0.756219, -0.338806, -1.064188, 0.086767, 0.112865, 0.410580, -0.160529, 0.685520, 0.343053, 0.015070, 0.034452, 0.050372, 0.006650, 0.317398, 0.658390, -0.431091, 0.506827, -0.100293, -0.258241, -0.098443, 0.440216, -0.351024, -0.425316, 0.190989, -0.198299, 0.598305, 0.592949, -0.761293, 0.291733, 0.298414, -0.596820, -0.056470, 0.506640, 0.614545, -0.038101, 0.304769, 0.047714, -0.178597, 0.606069, -0.020600, -0.324800, -0.517904, -0.244066, -0.781475, -0.347456, -0.177254, 0.442952, -0.502207, -0.313296, -0.221216, -0.158749, -0.000308, 0.210433, -0.293535, 0.143593, -0.187607, 0.410221, -0.101963, 0.038983, 0.216233, 0.109033, 0.071577, -0.274980, -0.519410, 0.748628, 0.132990, 0.017301, -0.318737, 0.462538, -0.219474, -0.257446, 0.106117, -1.033327, 0.555366, 0.314015, -0.099034, -0.101012, 0.316906, 0.525460, -0.067212, -0.234236, -0.594703, -0.301416, 0.049783, 0.360627, -0.340874, -0.108111, -0.030016, -0.068959, 0.419880, 0.079512, 0.136864, -0.870394, 0.190759, 0.264447, -0.055466, -0.274267, 0.294311, 0.145651, 0.181500, 0.068148, -0.079297, 0.116602, 0.398743, -0.596057, -0.283995, -0.631194, 0.272831, 0.028005, -0.269716, 0.821832, -0.264261, 0.296154, 0.000392, 0.268086, 0.256604, 0.438840, -0.025024, 0.229269, 0.372718, -0.111645, 0.275149, 0.074714, -0.149943, -0.677116, -0.423984, -0.055012, 0.807868, -0.793449, -0.288004, -0.217445, -0.417038, 0.048146, 0.271043, -0.240449, 0.003111, 0.200560, 0.515866, -0.112648, 0.480673, -0.153305, 0.162125, -0.360713, -0.281660, -0.433440, 0.062965, -0.287887, 0.376615, -0.010636, -0.070215, -0.413233, -0.407751, -0.569790, 0.620695, -0.445826, -0.266545, -0.143776, 0.556380, -0.358060, 0.091419, 0.154067, -0.424598, 0.678844, 0.586404, -0.293496, -0.565051, 0.211552, -0.060263, -0.154836, 0.118186, 0.659808, 0.048121, 0.028627, 0.310143, -0.108596, 0.233567, -0.028822, 0.211269, -0.069226, 0.083919, 0.067399, 0.551407, 0.423780, -0.168638, -0.212640, -0.304896, -0.148888, 0.044441, -0.146128, -0.221889, 0.151668, -0.279946, -0.454766, -0.137937, 0.329202, 0.392256, -0.123300, -0.013104, 0.020592, -0.154003, -0.141471, 0.245494, 0.342663, -0.524716, -0.256356, 0.096822, 0.377818, 0.058905, 0.199189, 0.192921, 0.046306, -0.310740, 0.375515, 0.113114, 0.067435, 
-0.171354, -0.187886, 0.098279, -0.443635, -0.418073, -0.076781, -0.216082, -0.459061, 0.174019, 0.092634, 0.095597, 0.229115, -0.063759, 0.166816, 0.224667, 0.389135, 0.201666, 0.280591, -0.505003, -0.199995, 0.164115, -0.018507, 0.216536, -0.228796, -0.591178, -0.108026, -0.061647, -0.155654, 0.063727, -0.290692, 0.071313, 0.164409, 0.025392, 0.333114, 0.058157, 0.341020, -0.008189, -0.657713, -0.469880, 0.380310, -0.422738, 0.146785, 0.138773, -0.181386, -0.080411, 0.223552, 0.362902, -0.642274, -0.216622, 0.244858, -0.053043, -0.255551, 0.420415, -0.162121, 0.178924, 0.692984, -0.513166, -0.353268, -0.566853, 0.654280, -0.097715, 0.617112, 0.737402, -0.252336, -0.919382, -0.002810, -0.243296, -0.697557, -0.152351, -0.020530, -0.044934, -0.028899, -0.191319, -0.194357, -0.176970, 0.400549, 0.291772, -0.242575, 0.424298, 0.457313, -0.631985, -0.030793, 0.099553, 0.011074, 0.058982, 0.193177, 0.192687, -0.396982, -0.226565, -0.427594, 0.366640, 0.581517, -0.384248, -0.604846, 0.314109, 0.230639, -0.521342, 0.201063, 0.692748, 0.048733, 0.597786, 0.063185, 0.225998, -0.180704, 0.329972, 0.150493, -0.042691, -0.159609, -0.309584, -0.275296, 0.984232, -0.030626, 0.040339, -0.253096, 0.007542, -0.286120, -0.222316, 0.158408, 0.383668, 0.429613, -0.571129, -0.086979, -0.774914, 0.035450, 0.399168, 0.141715, 0.261576, 0.757710, 0.092665, -0.444902, 0.364449, -0.026400, 0.486443, -0.191074, 0.267344, 0.391305, 0.019776, -0.162476, 0.011955, -0.169685, 0.360746, -0.057660, 4.487291, -0.752153, 0.426690, -0.514092, -0.135662, 0.128860, -0.352554, 0.339426, 0.187895, -0.105337, -0.067870, -0.561997, 0.384668, 0.139748, -0.076339, -0.383534, 0.193489, 0.217438, 0.310760, -0.471129, 0.408550, -0.783918, 0.924570, 0.544810, 0.173359, 0.325069, 0.118672, 0.157661, 0.495307, 0.043571, 0.420747, 0.461203, 0.030940, 0.079866, 0.202492, -0.143818, 0.388617, 0.284566, 0.085482, -0.424856, 0.169660, 5.002161, 0.496126, 0.368753, -0.291746, 0.049863, 0.052142, 0.562981, 0.643301, -0.097461, -0.153821, -0.237668, -0.369237, 0.234187, 0.218032, -0.617891, -0.360784, 0.184486, 0.035056, -0.343886, 0.100110, 0.015195, -0.149124, 0.577815, 0.609786, -0.190856, 0.157805, -0.111564, 0.056562, -0.264586, -0.228257, 0.143420, -0.409625, -0.407802, -0.193288, -0.178765, 0.457437, -0.001436, 0.413320, 0.456115, 0.493243, -0.467128, -0.516562, -0.063919, 0.772535, -0.142793, -0.273115, -0.326627, -0.252157, 0.386950, 0.000773, 0.080755, 0.169150, 0.207458, -0.113956, -0.269044, -0.411961, 0.196148, 0.279117, 0.476607, 0.002481, -0.446647, -0.131562, -0.290507, -0.031059, 0.522627, 0.374723, -0.189284, -0.022486, 0.121929, -0.004285, 0.320933, 0.433629, -0.872581, 0.510533, -0.047597, -0.043823, -0.172759, 0.340392, 0.585445, -0.344459, 0.179457, 0.052765, -0.247181, -0.092283, 0.166314, -0.082850, 0.624757, 0.408819, 0.517236, 0.592212, 0.266402, 0.063619, -0.332631, 0.093216, -0.002696, 0.496564, 0.017645, -0.081635, -0.045906, 0.131908, -0.360834, -0.074638, 0.884801, -0.112605, 0.152780, 0.241896, 0.178782, 0.302635, 0.096668, -0.569904, -0.235891, 0.697626, -0.153779, 0.083589, 0.118558, 0.148341, 0.138840, -0.483483, 0.039876, -0.215070, 0.378216, 0.477800, -0.376276, -0.107364, -0.056474, 0.634841, -0.829850, -0.132656, 0.166909, 0.284389, -0.751289, -0.661793, 0.215448, 0.434656, 0.023345, -0.133867, 0.439496, -0.496212, -0.117264, -0.017485, -0.372388, 1.472339, -0.363553, -0.193867, 0.281123, 0.351186, 0.103060, -0.298733, 0.369226, 0.295971, -0.239668, -1.152384, 0.448451, 0.275586, -0.237399, 0.174515, 
0.195369, 0.043802, -0.264791, 0.013253, 0.040941, 0.027880, -0.142671, 0.074643, -0.553351, 0.129982, -0.100195, 0.180619, 0.071297, -0.371206, -0.193323, -0.468507, -0.805926, 0.184589, 0.080032, 0.084080, 0.275789, 0.192904, -0.078385, 0.290711, -0.027608, 0.440038, -0.587812, -0.004581, 0.162749, -0.407841, 0.368317, -0.112203, 0.299965, 0.172529, -0.076716, 0.450040, -0.303669, 0.169699, 0.115787, -0.083244, 0.287229, -0.098873, 0.045379, -0.142650, 0.225791, 0.117558, 0.331856, -0.449384, 0.030248, 0.661376, -0.147196, 0.219943, 0.449944, 0.153362, 0.580988, -0.014734, 0.219984, -0.031598, -0.087134, 0.624181, 0.321511, 0.159627, 0.065644, 0.052018, -0.334922, -0.194216, -0.214867, 0.373563, 0.569381, 0.320083, -0.218419, 0.392195, -0.208969, -0.043425, 0.549654, -0.405465, -0.393805, 0.037566, -0.388948, -0.224721, -0.294911, 0.308722, -0.081107, 0.685822, -0.048916, -0.165033, 0.142796, -0.269439, -0.222129, -0.724417, -0.286839, 0.608340, 0.327775, -58.517979, -0.544269, -0.001866, -0.332386, 0.199435, 0.252479, -0.223065, -0.405478, -0.249875, -0.104632, -0.554372, 0.442492, 0.092624, -1.184105, 0.306272, 0.094480, 0.180830, -0.188470, 0.199765, -0.158313, 0.219714, 0.490681, -0.341416, -0.366322, -0.456658, -0.166933, -0.581015, -0.068403, -0.253523, -0.303116, 0.084059, 0.248953, 0.151603, 0.113430, 0.826785, -0.997589, -0.021810, -0.476477, 0.258189, 0.772523, 0.136442, -0.239684, 0.186631, 0.024477, -0.110479, -0.279112, 0.227488, 0.287025, -0.114060, -0.046808, 0.235135, -0.282328, 0.259069, -0.037700, 0.412282, 0.450889, -0.564557, 0.362352, -0.249231, -0.283077, 0.596018, 0.210083, -0.160349, -0.145695, 0.279737, 0.432935, -0.060723, 0.205922, -0.077641, -0.005025, 0.032035, -0.147291, -0.089267, 0.022549, 0.096784, 0.316596, -0.335415, -0.154303, 9.210744, 0.439763, -0.474908, 0.547985, 0.333362, -0.083074, -0.113624, -0.358873, -0.001362, 0.342030, 0.487156, -0.212562, 0.381375, 0.187135, -0.076945, -0.455437, -0.128274, -0.070635, -0.241333, -0.100881, -0.181602, -0.364256, -0.003553, 0.078503, -0.229145, -0.102308, 0.197712, 0.027701, -0.779926, 0.536838, 0.599103, 0.012468, -0.057213, 0.015195, 0.180001, 0.458210, -0.035604, -0.381750, -0.162652, 0.062134, -0.018506, 0.322445, 0.430900, 0.494851, -0.001246, -0.063209, 0.027377, 0.572648, 0.501041, -0.330562, 0.005837, -0.250195, -0.197133, -0.322219, -0.241606, 0.777974, -0.214089, -0.517721, 0.054999, 0.079865, -0.392691, 0.436211, 0.170465, 0.012087, -0.570184, 0.342888, -0.248057, -0.206725, -0.031366, -0.455187, -0.014615, 0.157380, 0.836096, -0.526227, 0.272770, -0.238688, -0.423359, 0.184629, -0.558110, 0.024373, -0.151099, 0.561669, 0.598668, 0.297249, 0.027630, -0.355015, 0.242124, 0.076283, 0.281585, 0.539186, 0.105147, 0.421329, 0.097530, 0.683345, -0.052615, -0.135516, -0.139486, -0.133028, -0.413589, 0.058511, 0.174991, 0.026137, -0.128775, 0.032074, 0.079121, -0.097758, -0.100383, -0.140296, 0.653418, -0.458114, 0.115476, 0.015241, -0.171333, -0.002086, -0.215181, 0.295451, 0.152256, -0.074834, -0.073559, 0.242919, -0.361564, -0.105575, -0.216026, 0.285056, 0.216080, -0.017842, -0.203024, 0.165294, -0.153601, -0.077933, -0.115630, 0.081592, 0.049974, -0.676193, 0.314703, -0.164646, -0.030029, -0.488286, -0.197653, -0.107439, 0.057860, -0.215854, 1.589793, 0.217210, 0.187354, -0.106198, -0.019009, -0.011475, -0.016074, -0.082618, -0.361909, -0.087632, -0.064170, 0.235486, 0.430535, 0.326502, -0.158178, 0.298049, -0.182466, 0.113623, -0.318923, 0.001735, 0.340390, 0.232367, 0.124780, -0.329235, -0.258752, 
0.232064, 0.057419, 0.071112, -0.393735, 0.243256, 0.444519, 0.086050, 0.239353, 0.491001, 0.005872, 0.332629, 0.355181, 0.453798, -0.406120, -0.000941, -0.504079, 0.147405, 0.410417, -0.263093, -0.082063, -0.330414, -0.024870, 0.603955, 0.011611, 0.247248, -0.437377, 0.374511, -0.386487, -0.052682, -0.704873, 0.137623, -0.302090, 0.611321, 0.347271, -0.128144, -0.070892, 0.555710, 0.235903, 0.620517, 0.356831, -0.088181, -0.160007, 0.040110, -0.402164, -0.132727, -0.304703, -0.037726, 0.249084, 0.463306, 0.410124, 0.206482, 0.084820, -0.448904, 0.441546, 0.493072, 0.177585, -0.202363, -0.028921, 0.279302, 0.206562, 0.210913, -0.330171, -0.465549, 0.120795, 0.345071, 0.735387, 0.512654, -0.197911, 0.433159, 0.039721, 0.779497, -0.224703, 0.258248, -0.097690, -0.123420, -0.614652, -0.402285, 0.541440, 0.066245, 0.084928, 0.117902, -0.200090, 0.050084, 0.132697, -0.505208, 0.336530, 0.370033, -0.026980, -0.182485, 0.137949, 0.097208, -0.373811, -0.432984, 0.365751, 0.116326, 0.786149, -0.336528, -0.143944, 0.276964, -0.337612, -0.537772, -0.135477, -0.157744, 0.480776, -0.016617, 0.178157, 0.447851, -0.460623, -0.082251, -0.066533, -0.221613, 0.263356, 0.364406, 0.033944, -0.270242, 0.301467, -0.181274, -0.774562, -0.336110, 0.013128, 0.379040, -0.491963, 0.510146, 0.115073, 0.120280, 0.541961, 0.340013, -0.030580, 0.080096, 0.215101, 0.109936, -0.498924, -0.311070, 0.359271, 0.007063, -0.440929, 0.494422, -0.285638, 0.636720, -0.141487, 0.515842, -0.552986, -0.224828, 0.077687, -0.618073, -0.191511, -0.243853, 0.518603, -0.099494, 0.009308, -0.480245, 0.537420, 0.192950, -0.265831, 0.051783, 0.308917, 0.155403, 0.018711, 0.211687, 0.407065, 0.442201, 0.136252, 0.084571, 0.095548, -0.225721, -0.000452, 0.138476, -0.149119, -0.664407, 0.180302, 0.024027, -0.168956, -0.060498, 0.176705, 0.197362, 0.810919, 0.100887, -0.343077, -0.250311, 0.202211, -0.533182, 0.565174, 0.071214, -0.071025, 0.294692, 0.345120, -0.369266, -0.363377, 0.094061, 0.053798, -0.675334, 0.105947, -0.366219, -0.233770, -0.146928, 0.059898, -0.260274, -0.351740, 0.509495, 0.466080, 0.533641, -0.283434, -0.170006, 0.497754, 0.113559, -0.234416, 0.006083, 0.091491, -0.393112, 0.243943, 0.068198, -0.135965, 0.071724, 0.267782, -0.299795, 0.225524, 1.235563, -0.060205, 0.328835, -0.436151, 0.408247, -0.826190, 0.398270, 0.206538, 0.275721, 0.916620, -0.380989, -0.155483, -0.165130, 0.108895, 0.192115, -0.203651, 0.378474, -0.264306, 0.075935, -0.263781, -0.194646, -0.089090, -0.463814, -0.084640, -0.277837, -0.342353, -0.024847, -0.192613, 0.186768, 0.111856, 0.178706, -0.120027, -0.541726, -0.337925, 0.118754, 0.238603, 0.208689, -0.145369, -0.150549, 0.653374, 0.094129, 0.106789, -1.053561, -0.196887, -0.240114, -0.745993, -0.152362, -1.204002, -0.082581, 0.305619, -0.058988, -0.598331, 0.185987, -0.060489, -0.423577, 0.511601, -0.325674, -0.120736, -0.115833, 0.159326, -0.285350, 0.046486, 0.190043, -0.050766, 0.455020, 0.403656, 0.323676, -0.360419, -0.349012, 0.049382, -0.285226, -0.303766, 0.506560, -0.419006, 0.309048, 0.217033, -0.084596, -0.243125, 0.644726, -0.052139, 0.160264, -0.375426, -0.394390, 0.098200, 0.259292, -0.638499, 0.021518, -0.129735, -0.101215, -0.224675, -0.042415, -0.101307, -0.089895, 0.188957, -0.548835, -0.143101, 0.536356, 0.472715, 0.358453, 0.414846, -0.370800, -0.235898, 0.262339, 0.449393, -0.263371, -0.217770, -0.226305, 0.047581, 0.003470, -0.028333, -0.130409, -0.138864, 0.496755, 0.231264, -0.030316, -0.476095, 0.256390, -0.104659, -0.518679, 0.125111, -0.423105, 0.494313, -0.181924, 
0.242448, 0.280324, -0.129197, -0.375469, -0.433732, 0.134280, 0.389087, -0.506155, 0.527342, 0.194793, 0.178930, -0.333685, 0.181848, 0.243475, -0.612548, 0.289363, 0.029938, 0.110186, 0.255495, -0.144533, -0.387284, 0.345772, -0.311679, -0.045539, -0.294871, 0.597431, 0.305880, 0.200646, 0.282531, -0.158703, -0.103099, -0.169067, -0.008382, -0.079756, 0.170810, 0.399198, -0.066380, -0.000559, -0.004255, 0.231632, 0.278643, 0.301572, 0.158929, -0.362739, -0.174208, 0.090002, 0.658097, -0.753288, -0.115159, -0.916517, 0.345335, 0.315035, -0.185459, 0.099901, -0.345710, -0.024454, 0.149417, 0.203911, -0.621984, 0.475272, 0.381955, 0.285558, 0.620195, -0.230048, -0.193346, 0.426024, -0.082065, -0.505288, -0.264816, -0.614786, 0.032445, -0.211640, -0.743929, -0.352548, -0.282483, 0.083452, 0.155323, 0.063746, -0.081079, 0.143076, 0.304706, -0.302146, -0.586734, -0.018925, 0.135363, 1.372922, 0.022931, -0.203815, -0.249750, -0.096802, -0.322165, -0.087625, -0.354634, 0.158380, 0.221725, 0.366852, 0.150404, -0.269403, 0.051307, -0.228197, 0.501474, -0.508713, 0.310570, 0.105104, 0.175922, -0.160948, -0.532149, 0.789218, -0.296985, 0.621667, -0.034217, 0.269299, -0.473348, 0.419473, -0.168176, -0.059621, 0.347642, 0.095557, 0.119908, 0.189069, 0.072328, -0.592562, 0.205006, -0.456758, -0.051765, 0.186611, -0.559713, 0.498142, 0.081539, -0.126901, -0.188289, -0.577864, -0.548407, 0.401423, 0.509642, -0.104356, 0.108317, 0.898374, 0.040251, -0.698863, -0.078856, -0.188995, -0.003449, -0.061420, -0.096289, 0.266162, 0.108274, 0.346424, -0.441824, 0.258435, -0.142721, -0.439990, 0.212413, 0.165938, -0.020241, 0.303992, -0.042712, -0.240819, 0.041794, 0.002941, 0.399268, 0.036751, 0.134195, -0.134292, 0.143634, 0.516429, 0.132746, -0.935939, 0.153861, 0.304214, -0.235507, 0.430125, 0.079114, 0.117013, 0.439177, 0.716553, -0.093885, -0.044955, -0.330311, 0.550033, -0.535871, 0.010232, -0.314547, 0.230491, 0.654590, 0.102710, -0.238373, -0.516637, 0.401736, 0.037948, 0.276305, 0.443498, 0.228219, 0.352056, 0.012582, 0.220353, -0.407780, 0.438028, 0.208541, 0.175988, -0.270985, -0.525523, -0.548477, 0.057955, -0.194781, -0.003298, 0.269760, -0.233289, -0.540260, -0.169692, -0.484772, -0.114232, -0.401115, -0.451770, -0.181977, -0.505564, -0.083401, 0.376819, -0.421163, -0.201286, -0.602337, -0.222767, 0.614156, -0.016797, -0.176580, 0.508542, -0.081163, 0.134763, -0.419954, 0.037240, -0.185230, -0.205294, 0.193024, 0.417523, -0.698548, -0.807030, -0.149474, -0.094982, -0.464999, -0.068225, -0.432381, 0.094518, -0.034902, -0.376230, -0.190843, -0.014541, -0.007451, 0.503071, 0.039062, -0.476695, -1.198664, -0.111433, 0.107706, 0.328139, -0.316463, -0.217056, 0.050820, -0.164824, 0.319711, -0.208170, 0.614053, -0.104899, -0.555545, -0.287073, -0.262042, -0.200674, -0.006250, -0.231419, -0.642396, -0.137048, -0.145983, 0.069366, 0.240843, -0.658315, -0.597073, 0.463778, -0.183305, 0.119860, 0.648310, 0.299937, -0.208176, 0.022095, -0.278521, -0.177953, 0.202277, 0.155420, 0.234455, 0.239173, 0.262826, 0.519646, 0.006134, 0.463624, 0.502869, -0.273425, -0.230075, 0.078392, 0.024440, -0.299886, 0.079623, -0.031235, 0.077222, -0.575926, 0.647744, -0.685791, 0.246712, -0.258738, -0.268204, -0.050904, -0.216824, 0.030123, -0.059568, 0.032685, 0.456928, 0.447742, 0.000349, -0.330170, -0.283173, 0.339716, -0.715647, 0.350537, -0.348117, -0.425378, 0.275241, -0.037290, -0.301914, -0.418955, -0.057846, 0.038219, 0.490801, -0.169052, -0.531855, -0.126881, 0.214531, -0.016972, 0.486737, -0.307092, 0.060385, 
-0.312028, -0.121100, -0.474154, -0.033297, -0.117146, -0.145217, -0.534372, 0.215293, 0.089433, -0.014680, 0.038871, -0.454982, 0.468077, -0.157630, 0.546171, 0.098118, 0.372806, -0.065871, 0.243025, -0.094658, -0.116481, 0.125599, -0.121057, -0.072374, 0.656453, -0.041724, 0.033926, -0.041462, -0.456399, -0.186608, -0.389824, -0.091279, -0.003108, 0.092814, 0.340213, -0.175039, 0.666803, -0.261088, 0.368768, -0.252754, 0.163965, -0.585329, -0.243940, -0.321711, -0.133012, -0.253066, -0.076904, -0.283974, -0.197103, -0.403979, 0.147247, -0.062263, -0.075245, -0.562477, -0.205014, 0.222767, -0.054252, -0.329677, 0.815129, 0.127072, -0.426652, -0.394265, 0.127829, 0.049436, -0.196066, 0.009618, 0.540864, -0.206275, -0.050193, -0.484369, 0.663735, -0.174202, 0.404585, 0.052897, 0.197160, 0.118619, 0.260436, 0.124425, -0.006648, 0.607803, -0.084435, 0.182488, -0.911126, -0.399702, -0.234616, -0.425689, 0.574178, 0.028556, 0.346813, -0.227521, -0.021377, -0.578073, -0.755381, -0.108397, 0.211026, -0.040924, -0.294499, -0.208813, -0.170734, 0.164126, 0.122645, 0.010301, -0.073459, 0.233719, -0.109440, -0.098003, -0.205121, -0.087854, 0.028307, 0.532311, -0.228900, -0.152958, 0.527488, 0.364191, 0.192383, -0.351570, -0.234025, 0.440298, -0.060356, -0.425945, 0.171791, 0.221136, -0.396888, -0.277037, -0.666172, -0.037598, -0.010303, -0.425237, -0.211664, -0.486224, 0.277630, 0.023025, 0.408915, -0.195016, 0.047634, 0.102783, -0.485706, 0.308589, -0.411144, -0.053444, 0.714636, 0.113829, 0.033845, 0.443375, -0.068617, -0.390009, 0.075894, 0.448207, -0.177000, -0.023104, -0.175386, 0.555945, 0.029201, -0.457334, -0.357339, 0.378139, 0.238245, -0.110313, -0.500821, 0.104544, -0.281480, 0.587670, -0.373495, -0.031328, -0.065652, -0.002243, 0.044564, -0.489538, 0.399526, -0.477148, 0.174514, 0.234356, -0.095146, -0.111628, -0.140329, -0.014722, 0.120857, -0.577776, -0.435228, 0.355946, -0.013572, -0.721267, -0.317641, 0.217404, 0.607119, 0.134049, -0.278283, -0.041902, -0.119571, 0.305286, -0.412544, 0.012801, 0.406974, 0.057604, -0.505895, 0.571090, 0.132650, 0.261956, 0.170358, -63.303562, 0.072853, -0.519040, -0.178422, 0.119113, 0.378141, 0.063010, 0.112086, -0.068155, 0.143270, -0.695979, -0.185095, 0.029685, 0.101319, 0.243841, -0.099949, 0.537030, -0.507941, -0.072316, -0.282923, -0.095952, 0.277304, 0.279224, 0.180773, 0.588230, -0.293736, -0.419753, 0.124619, -0.439369, -0.203452, 0.322170, 0.218235, -0.135500, 0.153501, -0.101683, -0.188600, 0.075205, 0.135442, -0.469343, 0.088359, 0.188366, 0.252388, -0.129069, 0.600527, 0.231637, -0.129259, -0.012753, -0.375286, -0.155583, 0.010971, 0.067799, 0.301400, -0.200182, -111.950508, -0.203151, 0.112066, -0.561678, 0.199826, 0.240353, 0.024038, -0.051374, -0.191646, 0.126553, 0.212573, -0.397948, -0.364226, -0.430931, -0.275204, -0.089596, -0.264655, 0.148102, 0.095208, -0.158669, -0.152007, -0.115978, -0.417394, -0.417549, 0.121950, -0.005634, -0.209508, 0.026816, 0.097941, 0.356630, -0.327798, -0.577439, 0.128759, -0.181856, 0.341304, -0.401386, -0.326418, 0.910050, 0.602965, -0.501181, 0.499152, -0.081410, -0.445701, 0.283482, 0.128661, -0.162188, 0.477510, -0.479452, 0.445276, 0.613333, -0.230295, -0.706247, -0.385147, -0.216893, -0.355799, -0.426881, 0.294281, 0.029298, -0.469063, 0.032645, 0.377577, -0.136977, 0.229560, -0.248002, 0.231691, 0.195523, -0.626372, 0.487773, -0.046927, 0.258056, 0.435159, -0.148099, -0.214715, 0.064264, -0.436486, 0.581707, -0.436593, 0.105906, -0.026306, 0.014113, 0.179391, -0.207917, -0.343595, -0.005207, 
0.196769, -0.042901, 0.063794, 0.336295, -0.545930, -0.357988, 0.289421, -0.300983, 0.284619, -0.280840, 0.172100, 0.115409, 0.575850, -0.238892, 0.714082, -0.077031, -0.006099, 0.256360, -0.335220, 0.010254, -0.108844, 0.427106, 0.144753, -0.501155, 0.062318, -0.124000, 0.221674, 0.834580, 0.110858, -0.012859, 0.568384, -0.610597, 0.660640, -0.116576, -0.069433, 0.786274, -0.164275, -0.076165, -0.051904, 0.149984, 0.419948, 0.000599, -0.677398, 0.008406, -0.954607, 0.185710, 0.442692, 0.498803, 0.072520, -0.947522, -0.119591, -0.001380, -0.499802, 0.019757, -0.521202, -0.370886, 0.026982, 0.187350, 0.188310, 0.041130, 0.150330, -0.069001, 0.405078, -0.064037, -0.170146, 0.371683, 0.318880, -0.644247, 0.235556, -0.033404, -0.507976, -0.227250, 0.002788, 0.061252, -0.474965, -0.231972, -0.367271, 0.486901, -0.053223, -0.059750, -0.304050, -0.058611, 0.157256, 0.175846, 0.395777, 0.008041, -0.273340, -0.205810, 0.243421, -0.182496, -0.391188, 0.377847, 0.187971, 0.037746, 0.198323, -0.077953, 0.698547, -0.498333, 0.330154, -0.068499, 0.217126, 0.308641, 0.351311, 0.470557, -0.105092, 0.424691, 0.009478, 0.033756, -0.260245, 0.310565, 0.035176, 0.096856, -0.544571, 0.565485, -0.450896, 0.238636, -0.755101, 0.350827, 0.415188, -0.067474, -0.598875, 0.159576, -0.031585, 0.304653, 0.366733, 0.508857, 0.094383, 0.099464, 0.119461, 0.115205, 0.158623, 0.196619, 0.236457, 0.215316, 0.612975, 0.200485, 0.058877, -0.025619, -0.205582, 0.714231, 0.136720, -0.388014, -0.196395, 0.079210, -0.423071, -0.123676, 0.474725, -0.050195, 0.970900, -0.171853, -0.083888, -0.179501, -0.064880, -0.223606, -0.006045, -0.080197, 0.252009, -0.350478, 0.026701, 0.014843, -0.027661, -0.832059, 0.008524, 0.424898, -0.018123, -0.540465, -0.423612, -0.029289, 0.013382, -0.661031, 0.262038, -0.232594, 0.666213, 0.276438, 0.294243, -0.137279, 0.595431, -0.343497, -0.335292, 0.270682, -0.050774, -0.357238, 0.464678, 0.166497, -0.214094, 0.412270, 0.237754, -0.456134, 0.069817, -0.240430, 0.211049, 0.282824, -0.002317, 0.005693, -0.191463, -0.583706, -0.221818, -0.012686, -0.338345, -0.070691, -0.581066, 0.068721, 0.214868, 0.449330, 0.286828, -0.724386, 0.094391, -0.135015, -0.060382, 0.069094, 0.477051, -0.039083, -0.416415, -0.160632, 0.307268, 0.644104, -0.157020, -0.185252, 0.103966, -0.107401, 0.200582, -0.256117, -0.471047, 0.576146, -0.426443, -0.473182, 0.363728, -0.448481, 0.198918, 0.262472, -0.481285, 0.124921, -0.790910, 0.555112, -0.416907, -0.686696, 0.157033, 0.151132, -0.101561, -0.165456, -0.120177, 0.040872, -0.353821, 0.464011, 0.356632, -0.641347, 0.002591, -0.529365, -0.375631, -0.088245, 0.157345, -0.168277, -0.077418, -0.104852, -0.224176, -0.583345, -0.200973, 0.330438, 0.095610, 0.044952, -0.111227, 2.766827, -0.075490, -0.004877, 0.122718, -0.211410, -0.255072, 0.014199, -0.044222, -0.232582, -0.213509, -0.058883, -0.111578, 0.575816, 0.382956, 0.168038, -0.660369, -0.317659, -0.287882, -0.159493, 0.303753, 0.161251, -0.084348, 0.177310, -0.023731, -0.208123, 0.375143, -0.130619, 0.013847, 0.084544, 0.385289, -0.844378, 0.783994, -0.133632, 0.453322, -0.368930, 0.468662, 0.074394, -0.085951, -0.492908, -0.102703, -0.070889, 0.019265, -0.132152, -0.490296, -0.154806, -0.438457, 0.106065, -0.362225, 0.098467, -0.300806, 0.082323, -0.066897, 0.534250, 0.646450, 0.452268, -0.157001, 0.063397, -0.202829, 0.173803, -0.187386, 0.587771, -0.348677, -0.511564, 0.085221, -0.224883, 0.157183, 0.246904, 0.638365, -0.585779, -0.287903, 0.340373, -0.030720, 0.061279, 0.074192, 0.301575, 0.018036, -0.261474, 
0.063272, 0.015081, 0.398179, -0.035079, -0.370379, -0.052801, -0.530252, 0.093011, 0.002836, -0.697104, 0.546006, -0.332340, -0.122048, 0.289079, -0.541533, -0.503000, -0.054692, 0.134673, 0.006598, 0.017476, -0.134000, -0.175561, -0.208997, -0.003923, 0.051280, 0.086685, -0.240404, -0.357857, 0.243271, 0.566339, -0.173989, -0.064073, 0.295804, 0.233289, -0.092659, -0.665500, 0.190993, 0.167569, -0.278074, -0.244135, -0.168051, -0.463405, -0.177617, -0.034471, 0.007265, -0.297528, -0.075948, -0.047524, 0.122029, 0.211117, 0.120729, -0.014987, -0.632226, 0.213933, 0.220751, -0.416721, -0.432320, -0.204294, 0.197827, -0.168607, 0.240733, 0.250329, 0.280815, 0.289349, -0.309672, 0.147626, -0.490905, 0.131334, 0.044870, 0.244089, -0.017633, -0.301772, -0.072316, 0.146994, -0.293942, -0.420279, 0.161965, 0.148996, -0.126067, 0.491702, -0.119142, 0.063983, 0.395035, -0.213297, 0.140833, 0.196409, -0.048128, 0.017225, -0.505947, 0.664136, -0.202357, 0.317128, 0.168667, 0.093162, -0.380574, -0.092151, 0.044294, -0.870981, 0.241743, -0.536381, -0.090947, -0.648787, -0.355641, 0.165425, -0.238059, -0.596338, 0.266993, -0.339680, -0.382530, 0.291458, 0.768516, -0.176065, -0.569123, 0.096203, 0.148821, 0.546749, 0.138208, -0.365413, 0.518124, -0.101714, 0.804016, -0.252335, 0.263548, 0.344599, -0.042573, 0.189019, 0.249793, -0.605840, 0.522157, -0.359445, 0.140590, -0.134246, 0.302941, 0.272368, -0.274690, 0.744960, 0.187399, 0.031517, -0.522490, 0.492635, -0.265579, 0.556953, 0.002599, -0.475620, -0.273551, 0.061263, -0.107502, -0.058321, 0.335491, 0.093252, 0.338381, 0.580982, 0.004722, 0.306945, -0.357699, 0.204085, -0.403294, -0.339715, 0.038231, -0.318873, -0.157163, -0.286547, -0.014830, -0.408272, 0.646497, -0.297623, -0.716684, -0.037028, -0.359614, -0.607903, -0.080210, -0.243790, -0.372580, 0.327745, 0.174814, 0.007795, -0.323086, 0.278917, 0.193464, -0.183376, -0.158650, -0.150630, 0.237812, -0.554636, -0.307655, -43.962406, 0.747826, 0.170216, -0.375895, -0.110257, -0.561347, 0.516887, -0.050687, -0.239293, 0.075187, 0.214272, -0.583057, 12.469608, 0.157744, 0.439794, 0.303756, -0.331535, 0.010205, 0.261059, 0.047994, 0.124434, 0.148169, -0.040808, -0.038213, 0.568745, 0.759167, -0.161330, -0.147977, 0.209662, 0.184281, 0.034607, -0.435261, -0.914145, -0.448240, 0.502655, 0.171096, 0.162151, -0.330909, 0.644280, -0.230030, 0.400959, -0.398285, 0.243113, 0.819535, -0.015852, -0.416345, -0.035277, 0.475769, -0.331067, 0.097865, 0.243495, -0.104292, -0.019232, 0.216380, 0.644561, -0.040559, 0.058224, -0.022566, -0.086129, -0.033864, -0.107755, -0.452065, 0.232334, 0.260739, -0.491365, -0.583293, -0.385895, -0.032558, 0.422507, -0.104322, -0.199052, -0.450761, 0.188655, -0.015979, 0.101417, -0.108862, -0.445502, -0.133705, -0.177996, -0.163099, -0.119531, -0.171117, 0.427125, 0.238072, -0.338347, 0.201365, -0.159817, -0.007986, -0.612904, -0.590535, -0.537798, 0.119414, 0.221465, 0.378790, 0.208833, -0.014505, 0.360619, -0.548349, 0.216484, 0.256683, 0.385207, 0.706974, 0.283812, -0.470752, 0.290828, -0.313211, 0.595083, -0.867086, -0.099950, -0.006093, 0.328319, -0.174707, 0.061710, -0.149805, 1.078194, 0.353151, -0.642559, -0.077821, 0.227179, 0.333391, 0.007280, 0.522016, 0.349228, -0.273700, -0.092780, 0.020995, -0.248111, -0.178455, 0.095094, -0.057657, 0.430333, 0.131980, 0.129095, 0.375191, -0.076389, -0.125741, 0.223384, -0.356347, 0.353186, -0.023569, -0.242601, -0.549070, 0.215102, -0.368639, -0.316551, 0.274893, -0.523182, 0.007249, 0.256059, -0.174845, 0.097209, -0.344082, 
-0.195790, -0.205222, -0.601514, 0.215583, -0.090739, 0.279482, -0.269076, -0.103268, 0.192448, 0.343325, -0.083246, -0.548744, 0.427873, -0.101012, -0.624601, 0.132522, 0.332115, -0.602158, 0.100890, 0.363860, -0.269838, 0.326628, 0.123457, -0.670479, -0.476265, 0.158514, 0.236981, 0.179209, -0.047529, -0.307782, 0.100836, -0.113148, -0.232984, -0.231841, -0.206003, 0.090617, 0.506436, -0.088726, 0.060476, 0.503427, 0.701742, -0.349184, 0.198925, -0.189434, -0.242362, 0.060725, 0.099748, 0.038268, -0.391647, 0.629047, 0.516061, 0.209719, 0.497822, 0.031479, -0.268111, -0.201153, -0.449376, -0.248165, 0.497311, -0.135929, -0.426607, 0.067370, -25.183310, -0.217668, 0.244560, -0.113699, -0.034418, 0.279075, -0.087385, 0.090707, -0.057644, -0.038125, 0.556788, -0.185287, -0.570150, -0.224455, 0.177524, 0.295346, -0.079438, 0.456904, -1.079334, -0.205251, -0.241942, 0.030973, -0.412797, -0.039332, -0.078231, -0.625990, -0.752923, -0.103901, -0.260115, 0.343021, -0.274866, -0.338814, -0.084887, 0.440932, 0.021582, 0.504624, 0.122857, -0.114787, 0.298529, 0.105958, 0.489619, 0.023746, 0.315594, 0.449650, 0.291635, 0.456601, 0.009884, 0.411796, -0.053264, 0.003709, 0.146712, 0.551897, 0.747416, -0.292591, -0.418243, 0.235221, -0.076943, -0.266084, -0.249324, -0.296882, -0.198107, -0.645373, -0.300805, 0.539361, -0.779099, -0.247917, 0.190751, 0.161664, 0.476388, -0.065680, -0.249199, 0.418452, 0.159107, 0.429605, 0.161158, -0.122370, 0.595451, 0.982097, 0.093595, -0.179688, 0.247968, 0.154720, 0.151772, 0.511285, 0.429992, 2.756784, -0.477258, 0.210764, 0.060735, -0.206560, 0.050323, 0.226779, 0.488080, -0.322912, 0.215526, 0.079592, 0.581980, -0.074688, 0.156174, -0.439357, 0.177422, 0.084449, -0.244841, 0.041477, -0.124318, 0.522598, 0.331280, 0.219801, -0.001475, 0.191698, 0.235577, 0.397007, -0.140321, 0.127137, 0.326186, 0.080475, -0.198508, -0.016927, -0.647423, 0.118334, 0.151125, -0.260203, 0.370355, -0.372183, 0.684103, -0.044755, 0.411336, -0.508753, 0.137960, -0.578130, -0.308091, 0.662036, -0.223479, 0.528630, -0.088039, 0.301694, 0.581005, -0.571935, -0.326560, 0.074047, 0.560205, -0.517768, -0.118131, -0.825478, -0.052467, 0.087111, 0.156990, -0.125347, 0.200030, -0.232904, 80.721504, -0.353159, 0.366250, 0.661608, 0.076759, -0.675517, 0.160898, -0.327791, -0.218044, -0.286360, -0.110733, -1.071516, -0.166569, 0.236425, 0.076636, 0.110097, 0.025876, 0.136520, -0.289324, -0.052212, -0.116298, -0.413791, -0.261230, -0.305566, 0.213748, -0.213800, -0.181269, 0.551312, 0.220690, 0.754416, 0.246657, 0.569155, 0.251539, 0.176920, 0.161606, -0.531014, -0.195418, -0.095278, 0.553006, -0.337154, -0.577858, -0.308738, -0.046533, -0.260912, -0.037302, -0.461034, -0.245663, -0.409261, -0.124479, 0.556365, -0.376639, 0.100875, 0.092979, 0.564593, 0.361897, -0.145732, 0.145205, -0.663406, 0.029300, -0.542234, 0.114352, 0.108448, 0.103333, -0.109936, -0.598299, 0.671874, 0.007481, -0.067422, -0.047742, 0.225532, -0.224111, 0.202110, 0.233249, 0.132545, -0.249386, -0.251535, -0.008667, 0.486993, -0.417082, 0.657399, 0.234556, 0.103305, -0.333804, 0.601285, 0.350520, 0.213891, 0.336645, 0.362093, -0.728231, 0.082136, -0.602861, -0.315520, -0.174943, 0.939620, 0.721859, 0.055698, -0.052252, 0.005834, 0.295090, -0.253800, 0.549283, 0.408780, -0.037937, 0.030686, 0.093003, -0.155114, -0.104932, 0.285636, 0.134702, -0.235902, -0.073232, -0.016380, -0.196810, -0.231825, -0.039652, 0.191879, 0.708907, 0.046643, -0.679296, -0.272560, 0.220830, 0.632073, 0.011146, 0.338630, -0.006068, -0.417746, -0.790530, 
-0.044891, -0.112715, 0.119640, -0.555511, 0.385198, 26.227066, 0.327086, -0.359925, -0.246532, 0.315119, 0.315461, 0.184825, 0.874743, -0.243538, 0.034515, -0.099371, 0.384990, -0.588993, -0.329512, 0.202681, 0.108792, -0.522285, 0.317060, 0.258312, -0.340065, 0.353208, -0.226905, -0.467262, 0.197749, -0.205172, -0.173064, -0.003351, 0.592039, -0.122060, 0.194374, -0.540868, -0.138176, 0.341844, 0.081864, 0.242733, 0.373292, -0.228865, 0.148295, 0.120463, -0.094172, -0.023198, -0.016959, 0.237407, -54.570545, -0.573048, 0.675045, -0.190351, 0.164020, -0.054506, -0.056234, -0.145207, 0.239472, 0.296177, -0.228059, 0.566965, 0.066044, -0.390903, -0.266558, -0.301220, 0.327292, -0.387228, -0.326956, 0.046026, -0.537017, -0.113912, 0.009175, -0.175402, -0.095018, -0.330905, 0.084470, -0.136133, -0.354516, -0.469037, 0.438247, -0.231660, 0.032670, -0.097507, 0.094830, 0.058257, -0.154819, -0.390733, -0.600798, 0.396636, -0.294010, -0.608886, 0.159131, -0.488123, 0.253940, 0.310930, -0.223843, -0.505954, -0.019320, 0.451123, 0.033688, -0.305418, -0.521554, -0.435363, 0.323155, -0.312042, -0.008847, -0.252040, -0.180144, -0.310767, -0.178879, 0.130031, 0.857571, -0.107182, -0.299440, 0.559908, 0.155175, 0.366574, 0.170108, -0.399408, 0.217482, 0.401690, -0.321029, -0.045608, -0.068220, 0.157180, -0.177041, -0.105864, 0.044081, -0.435274, -0.008077, -0.537582, 0.047345, -0.313762, 0.100752, 0.147813, 0.158115, 0.128763, 0.052037, 0.156659, -0.200804, -0.530616, 0.043076, -0.484326, 0.115019, -0.426879, 0.149550, -0.379401, 0.265588, 0.110010, -0.137560, -0.046419, -0.190895, 0.185832, -0.424423, 0.238436, -0.331180, -0.208932, 0.093624, -0.136191, -0.024762, -0.284244, 0.031795, -0.036643, 0.343705, 0.086551, 0.040446, 0.576904, -0.533034, 0.577420, -0.326808, -0.414999, 0.202424, 0.118209, 0.829163, 0.636830, -0.537408, 0.020064, 0.206316, -0.161511, -0.346788, 0.161839, -0.026227, -0.223244, 0.261731, -0.382454, 0.108277, -0.355422, 0.103880, -45.383808, -0.162695, -0.150417, -0.127754, -0.070344, 0.119024, -0.116959, -0.034194, 0.281006, 0.047009, -0.478871, -0.337656, -0.401619, 0.260010, 0.109437, -0.230819, -0.169927, 0.253672, 0.187743, -0.142163, 0.026885, 0.312044, 0.012883, -0.075687, 0.187942, -0.864674, -0.161560, -0.180769, 0.423245, 0.286899, -0.322536, 0.082467, 0.129985, -0.038326, 0.400418, -0.058516, 0.277353, 0.456197, 0.539164, -0.083526, 0.026757, 0.039200, -0.235375, 0.052175, -0.201285, -0.225762, 0.416601, -0.418963, 0.252038, 0.136844, 0.261401, -0.321984, 0.021641, 0.297252, -0.045968, 0.015987, -0.632211, 0.079325, 0.178558, -0.686958, 0.065480, 0.519709, 0.642042, -0.067845, 0.037921, 0.154914, -0.014848, -0.024645, 0.106363, -0.512707, 0.045547, 0.496962, -0.158557, 0.217693, 0.007438, -0.582735, -0.274856, 0.251478, -0.168064, -0.218378, -0.187458, 0.235936, -0.303580, 0.133740, 0.433543, -0.416777, 0.163158, -0.312299, -0.542848, 0.314374, 0.362104, 0.192649, 0.357390, -0.229162, -0.168068, -0.437465, 0.052538, 0.281516, 0.368737, -0.097629, 0.968332, 0.208670, -0.460906, -0.274308, 0.238028, 0.236472, 0.210751, 0.070369, -0.012518, 0.160674, 0.005216, -0.122002, 0.292114, -0.093093, 0.153105, -0.204351, 0.578618, -0.321862, -0.400429, 0.156500, 0.084270, 0.381356, 0.067640, 0.051333, 0.399535, -0.307002, 0.340882, 0.135507, 0.089646, -0.237054, 0.281415, 0.090153, 0.462730, 0.171334, 0.454288, -0.291779, -0.587445, -0.012028, -0.045764, 0.449051, 0.142538, 0.409746, -0.105807, 0.066156, 0.277036, 0.749279, 0.060583, 0.179168, 0.016475, -0.148920, -0.023366, 
-0.071207, -0.346201, 0.011620, 0.048880, 0.374774, -0.428500, -0.617913, 0.200059, -0.312047, -0.069751, 0.212112, 0.469310, -0.322054, -0.436160, -0.151759, -0.362400, -0.100673, -0.390196, -0.720022, 0.013152, -0.093669, 0.500391, -0.181266, 0.171757, 0.205245, -0.491997, 0.150053, -0.303285, 0.213764, 0.181915, 0.042309, 0.117224, 0.243289, 0.163959, 0.249842, 0.096863, -0.027731, -0.371531, -0.153938, 0.580561, 0.096046, -0.302468, 0.400171, 0.505695, -0.165858, -0.123545, -0.023400, 0.045407, 0.485218, -0.918825, -0.379058, 0.030079, -0.237506, -0.267864, -0.423086, -0.861124, 0.339907, -0.120479, -0.469097, 0.068623, 0.029885, -0.089538, 0.517054, 0.389653, 0.260604, -0.697998, -0.163827, 0.174017, -0.102305, 0.016205, -0.635075, -0.597663, -0.165878, 0.092621, 0.079050, 0.743767, 0.460755, 0.141955, -0.437859, 0.222894, 0.521355, -0.322920, 0.021801, 0.437041, 0.586200, 0.036765, 0.155797, 0.139985, -0.041782, -0.709892, 0.013074, -0.547880, 0.155922, 0.070960, 0.569953, -0.219364, -0.014088, -0.335218, 0.293984, -0.440478, -0.020124, -0.123338, 0.212336, 0.409897, -0.151092, 0.206929, 0.108662, 0.003318, 0.480850, -0.388049, -0.373116, 0.508014, 0.646062, 0.338924, -0.014462, 0.072739, 0.503070, -0.571416, -0.053392, 0.261631, 0.593776, -0.324953, 0.396804, 0.309049, 0.453608, 0.104420, -0.480800, -0.556374, 0.342257, 0.100447, -0.134773, -0.252847, -0.256652, -0.206923, 0.125899, 0.127910, 0.259252, -0.247190, 0.271972, -0.559636, -0.121707, -0.268752, -0.576536, -0.150227, 0.483635, -0.634572, 0.468136, -0.471012, 0.473711, 0.300323, -0.380895, -0.137505, -0.299252, 0.208652],
+	"falcon2:latest":       [-1.275306, 3.837295, 3.104335, -0.132523, 3.717942, 0.372535, -1.772705, 3.119587, 0.330449, -0.700744, -2.615506, 10.353819, -2.361413, -2.383462, 1.663645, 2.710775, -4.230299, 2.722366, -0.357314, -2.037119, 1.449030, -4.184108, -1.876729, -0.485423, 1.748516, -0.127038, -3.764583, 0.180338, -1.962256, -1.628213, -0.470965, 0.818017, 0.607546, -1.474351, 3.853329, -2.645384, 0.169052, 1.954102, 2.811140, 0.609534, 0.137802, -0.385182, -3.722800, -2.822938, 1.028885, 4.215508, -1.851904, -0.174632, 0.901177, -1.909700, 1.275259, -3.331348, -0.757433, 0.559766, -3.048050, -1.044024, 1.142050, -0.407469, -3.040350, 3.580981, 4.013700, 3.739869, 0.390057, -0.821299, 0.094822, 3.214658, 2.524949, -0.300330, 0.556939, -3.069934, -0.515992, -2.658362, -0.433554, 9.095484, 2.195112, -3.849095, 5.091149, 0.481200, -1.134630, -0.239280, 0.887909, 1.042396, 2.646552, 1.802093, 1.645549, -3.790758, 0.988161, -1.200287, 1.894759, -1.166587, -0.253824, -1.039379, -2.276851, -0.460683, 0.297967, -3.424223, -4.195358, -3.542181, -1.769864, -1.338850, 0.911988, -2.302943, 2.484190, 2.044442, -4.596624, 1.267139, -2.049352, 2.546231, 0.792628, -1.380133, -0.790615, 6.012039, -0.964435, -0.969051, -1.812450, -0.890375, -0.798536, 4.244349, 0.251844, -1.319340, -8.636320, -0.711426, -0.413526, -0.978065, -2.059388, 3.617761, 0.892199, -0.481894, 2.143296, 0.515694, -0.860783, 4.284967, 0.612026, 4.006313, -0.335522, 0.782485, -0.058630, -0.286828, 0.756606, -1.973024, -1.502667, 0.269806, 2.558606, -0.341148, 3.807795, 2.500325, 0.260572, 3.994771, 3.990319, 4.135689, -5.712500, -2.158962, 0.670838, 1.928773, 0.011099, 2.180364, 4.558554, -5.859202, -1.996603, 1.865865, -2.038666, -2.903919, 2.060443, 0.326185, -0.932059, -1.541007, 3.649105, 1.813717, 1.723021, -0.905025, -3.768096, -1.637242, 3.691015, -2.636121, 1.495227, -0.221481, -2.330304, 1.152045, 3.441514, -1.925960, 0.132836, 1.376882, 0.099456, -0.139153, -2.827578, 3.249744, 2.968992, -3.029808, -0.102355, -3.398517, -2.040715, -2.388101, -0.241652, -3.439781, -1.627563, -3.316260, 1.659161, -1.469869, 2.303240, 0.234823, -2.916763, -3.792274, -3.562071, 1.947090, -0.283635, 1.218568, 3.385237, -0.217122, -2.976565, 1.250648, -1.260653, 1.940458, -2.756780, 1.184896, 2.428695, 0.372655, 2.108495, 2.850650, 1.011860, -3.017907, -1.272859, -5.748411, -2.802590, -1.651772, -3.874090, -6.691458, 1.628203, 0.902252, 1.731206, 1.233933, 2.390242, 4.491738, 1.581355, 1.289003, -3.980048, 0.603783, -3.222681, -0.143940, -4.617625, -2.092155, 0.132560, 0.233982, -1.955884, 3.025106, -1.231423, 1.970144, 1.114148, -0.862942, -0.308265, 0.604513, 1.433046, -3.165818, 5.211211, -0.752265, -0.261660, -0.435479, 0.435807, -3.130022, -3.382930, -0.302572, -0.304755, -0.875783, 2.048395, -1.610556, 0.004490, 3.130698, 2.056630, -0.268167, 1.429440, -1.549552, 2.836768, 1.683631, 0.112271, -2.846099, -3.467909, -1.734015, -1.098224, -0.865726, 3.147281, -1.294847, 0.867880, -0.441601, 0.848505, 0.525628, 3.181535, 3.977831, -2.883601, 1.395165, 1.380046, -1.757408, 3.363719, 2.468548, 1.804424, -1.532946, -2.130015, 2.167905, 0.806237, 2.135587, -0.616036, 2.803680, -2.744563, 3.122983, 2.290845, -1.941239, -9.350484, -3.405049, -2.718211, 0.868880, -11.208566, -2.547097, 1.621465, 0.609452, -2.762423, 0.948383, 2.396591, -3.263968, 2.348562, -0.143500, -0.433124, 3.383017, 1.096289, 0.702994, 1.397233, -3.194034, 3.071927, -0.490006, 1.249623, 0.095233, 1.227319, 2.841045, 1.574195, 0.175490, 4.525906, 2.250069, 4.384015, 0.057987, 
0.493614, 2.313399, 1.154731, 3.197756, -1.679319, 1.088407, -2.359752, -0.140352, 2.276346, 1.908259, -1.822850, -7.785120, -3.179761, -1.742176, 1.736056, 0.866745, 1.359070, 1.303184, -2.294070, 0.053626, 0.213622, -3.149813, -1.677805, 5.298675, -4.036638, -1.087946, -9.100941, 0.924279, 2.780944, 0.060562, 0.627181, -3.341824, -3.136374, 1.235826, 3.671900, -3.921594, -3.728671, -0.480427, 6.906900, 2.382487, 2.390993, -2.643352, -0.856610, -1.719015, -1.395070, -0.955716, -0.926107, -0.762820, -0.575991, 2.222496, 0.387985, -1.718496, 2.484186, -1.179515, -0.119917, -2.718995, -0.767698, -0.807503, 0.944953, -0.757398, -2.630700, -2.638915, -2.941369, -2.706702, 0.776204, 1.073829, -2.168399, 2.919386, 0.639097, 2.180970, -3.533285, -3.003216, 0.523227, -2.920606, -1.932676, 3.508812, -0.450955, -2.594667, -2.833239, 0.684165, 1.521983, -4.122576, 0.048962, 6.750394, -0.751528, 4.363237, 2.375494, -1.676940, -0.282620, -1.074734, 5.402434, -1.364364, 2.213356, 3.225933, 0.355178, 0.533272, -0.000414, -1.026736, -3.608583, 3.379501, 0.643868, -0.129584, -2.414845, 0.739444, 1.379362, 1.128999, 1.212591, 2.126804, -3.392489, 2.190962, -1.066917, 0.072771, -1.777203, 0.017392, -0.069404, 2.575250, -0.143043, -1.058601, 0.293741, 1.353118, -1.655753, -2.091888, 0.584350, -0.720455, -3.424674, -1.981546, -9.107293, 1.949826, -1.522261, -2.681044, -8.341246, -3.131934, -0.714103, 0.207412, 2.508620, 1.767241, 2.730778, -0.825572, -4.030791, 0.238825, -0.371033, 0.656116, 3.989294, 1.633813, 0.325307, -0.367170, 3.002364, 5.156171, 1.634276, -0.971831, -1.604515, -2.526582, -2.878766, 2.807352, 1.909165, -3.935943, -1.386310, 3.170664, -1.649927, 2.220213, 1.246705, -3.735313, 0.272806, 2.752307, 3.109334, -0.102484, -2.823759, -0.632412, 4.712577, 0.411905, 1.390527, -0.006540, -0.578229, -0.902223, 2.687959, 0.999556, 5.464525, -1.069448, -1.659709, 1.730871, 3.159081, -3.505104, -1.294691, 0.470974, 2.520426, -0.375588, -1.659444, -2.902283, -0.643385, 0.652468, 0.084223, -0.908147, -1.238602, -1.975286, -2.141201, 0.736527, -1.523522, 1.027745, 3.354010, -2.870191, -2.455250, -0.858900, 1.902766, -1.251234, -0.778018, -0.829765, 1.343457, 3.192241, 2.146025, -2.868140, 1.453027, -1.014266, -0.626704, -1.431984, 1.272131, 1.996223, 2.216051, 1.321754, -3.712698, -0.555156, 1.665618, -2.150003, 2.469344, -2.743378, -0.621012, 2.153028, -1.323079, -2.781879, -3.508905, 2.613299, -1.131783, 1.250944, -0.726568, -0.350890, -0.510844, -0.491322, -0.969947, 0.056512, 2.073022, -0.684992, 2.324330, 1.388874, -0.592233, -0.007646, -2.144584, 3.341875, -3.342694, 1.472340, 1.924615, 0.260784, 1.455808, 2.527028, -0.212072, -2.648695, -3.802433, 2.224674, 1.380913, -2.425954, 0.170378, -3.111950, -2.677550, -2.053871, 3.144357, 2.069789, 1.802598, 2.778758, 1.854536, 0.374600, 2.614195, 0.576047, 0.823646, 2.809750, 2.347219, -1.944691, -4.136095, 0.170414, -2.563500, -3.029089, -2.152708, 0.650496, -0.629906, -6.837932, -0.400384, -2.685872, 2.135639, -1.354106, 1.782213, 1.624843, 0.522975, 0.623956, -3.265278, 4.157569, 1.269328, 2.334650, 2.017716, -1.755639, 0.314134, 2.929660, 1.085749, 0.075581, 5.186268, -3.722156, -0.716771, -0.292780, 0.854932, -1.265511, -1.754162, -2.801681, 2.599700, 1.192626, 0.355987, -2.978914, 3.488926, -7.477931, 0.302190, 0.159773, -1.786207, 5.423014, -3.205537, 3.759773, -0.824988, 1.123907, 1.247544, -1.165879, 4.103333, 1.861792, 0.432820, 2.554196, -6.796187, 3.449815, 2.632658, -0.926445, -1.542485, 3.184329, 2.700121, 1.650769, -4.734762, -0.469165, 
1.700503, 0.710945, 0.347955, 3.436646, -1.542240, -1.133071, 2.124078, 2.539431, -5.082206, -1.449178, 0.902514, 0.330178, 0.733471, 0.341730, 1.352243, 1.385499, -1.848245, -2.310890, 1.524840, -0.910095, 3.168741, 0.410063, 0.614422, 1.316848, 1.423225, -2.094517, -3.665807, -0.068259, 0.396121, 0.212791, -1.601394, 1.471721, 0.772471, 2.288241, -0.320014, -2.428612, -2.265923, -2.968658, -0.796963, 5.248973, -6.998604, 2.486512, -4.433386, 0.621024, 0.518264, 0.806498, -2.155303, 0.587934, 1.328967, 2.631720, -3.549683, 2.099039, 10.083544, 3.482533, -1.516241, -1.431416, -2.913141, -0.618922, 0.779420, 0.236411, -0.174201, -0.076979, 0.220478, -2.811640, -2.158289, 1.648178, 1.729170, 1.846626, 4.329294, 2.369144, 1.389106, 2.679754, -2.294376, 0.030183, -1.699179, -3.449418, -2.010730, 4.066484, -4.480805, 2.193635, -3.104744, 3.195236, -1.664788, -5.720037, -1.266570, 3.600276, -3.170063, 1.536892, 1.569534, 1.513302, 1.860432, -1.550267, -4.262663, -2.133216, 1.650067, -1.284678, -6.684666, 2.014430, -0.744968, 1.426262, -1.076141, -0.831970, 0.365885, -0.528754, 0.758301, 3.348044, 0.187185, -1.889567, 2.917908, 3.023930, 3.023178, -2.817112, -3.179446, 2.497720, 2.188169, 1.301504, 3.268865, -1.754922, -1.822780, -0.972488, 0.873712, 1.323133, -2.205761, -1.612030, 2.303227, 2.813283, -4.188452, -2.182941, 0.085170, -3.564689, -2.566697, 0.568231, 0.818893, -1.662748, 1.361568, -3.690748, -0.252375, 0.785948, 1.647538, -0.423860, -0.906425, -1.824527, -2.521552, 0.867587, -2.148677, 2.452386, -1.397286, 1.692210, -1.310338, 0.637889, 2.583428, -1.523493, 1.296878, -3.746370, 2.423034, 1.349175, 0.721364, 3.474914, -0.023369, 0.163030, -1.579503, 0.504772, 0.925550, 2.454133, 0.964500, 1.823297, 0.661305, 1.748695, 1.492398, 0.669027, -0.978568, -3.156673, 0.232972, -0.302810, 1.174894, 5.136952, 1.104236, -2.518059, 3.446657, 3.263038, 1.778748, 0.553110, -0.299201, 1.262304, -0.469016, 0.049288, 2.680809, 3.362026, 1.534753, 2.141226, 0.736711, 0.353176, 1.463385, -1.981672, 1.908352, 2.260112, -2.573204, 2.284262, 3.457025, 1.571169, 0.957621, 0.818471, 2.919568, -2.219922, -0.831442, 2.851260, -0.567659, -2.098528, -1.197574, -0.571810, -3.110029, 4.160790, 0.262709, 2.311606, -0.834847, 2.457886, -2.212020, 3.465058, -1.780708, -0.743038, 2.232378, -1.010238, -3.758393, 3.163526, 1.855109, 0.639158, 0.407191, -2.256367, 1.018602, 2.291389, 2.191854, 0.564081, 0.571482, 0.289375, 1.938669, 2.964486, -1.157557, 3.346216, -0.604904, -1.879804, -3.309341, 2.884830, 1.600047, -1.182005, 0.213239, 2.088271, -2.216653, 0.805338, 3.107358, 1.409771, 2.426908, 4.041484, 2.601931, -2.011261, 1.826019, -3.202612, -1.295027, 3.832757, -1.571364, 6.595190, 2.578595, 1.157802, -0.552183, 2.183033, 0.745915, -1.118431, 0.077764, 3.768408, -1.987089, 0.194293, 0.610980, -0.361441, 2.990337, -2.133753, 2.919108, -1.559100, 2.185312, -1.411286, 2.620373, 3.767267, 2.179324, -1.849180, 0.756302, 1.529904, 3.719940, -1.179238, -2.207529, -3.936229, -2.072637, -2.803760, -3.367758, 1.098284, -2.009740, 3.083918, -0.394910, 2.485883, 0.113272, 2.655967, 1.349244, -2.255511, 1.025767, -1.384176, 1.732267, 2.225488, 1.098336, -0.509963, -0.263632, 2.146725, -0.957762, 1.923358, 0.133071, -3.363287, -1.703886, 1.032311, -0.755408, 2.498776, -2.764693, 1.509607, -1.617618, 1.436502, -1.559158, -0.746124, 1.562070, 17.831846, 4.211090, -1.706081, 2.346309, -3.273780, -2.834237, -3.982527, -1.515350, -3.250439, 0.453186, 0.603446, 3.211556, 3.159271, 0.628849, -4.001279, -0.854389, 3.856479, -3.991511, 
-0.177681, -2.286148, 3.187693, -0.223602, -0.075862, 2.957560, -2.254858, -0.967292, -3.214899, 2.144798, 2.144349, -4.565749, 3.582543, -0.960689, 2.410637, -2.425848, 2.851839, 1.097054, -0.605512, -1.531101, 0.845345, 1.495862, -0.475145, -1.574789, -0.029720, 0.645808, 3.104814, -2.337604, 0.435249, 1.841378, 0.299883, 2.139250, 2.561849, -2.265870, 2.402302, 0.791734, 4.197929, 0.943617, 3.297447, -2.381050, -3.254916, -3.195335, -2.113678, 1.651127, 0.783532, 3.214834, 0.408355, 3.205199, 1.115050, 2.330830, -2.771334, -1.927293, -1.882555, 7.298583, 1.038048, 3.148403, -2.869358, -1.115117, 0.074848, 4.928357, 2.089816, 1.986463, 0.470056, -6.525270, 1.073113, -4.135253, 0.774483, 1.268632, -2.197856, -2.068014, 4.605621, -2.308162, -2.780638, -0.721838, -4.479808, -3.508407, -2.428396, 4.441323, 1.775349, 0.214881, -1.635818, 3.045053, -0.610118, 1.187438, -1.759295, 3.817451, -1.664751, 2.842919, -2.662443, -1.073685, -1.129181, 0.843486, -0.775921, -2.891870, 2.612444, -3.581708, 0.276234, -1.549791, -0.161535, 9.922145, -2.840818, -1.211635, 2.635808, 6.841765, 0.938128, 0.082862, -0.197744, 0.354956, -2.333372, -0.819113, 0.838161, 5.306095, 1.898514, -0.049039, 2.551995, -0.244460, 3.227154, 0.455535, 2.356509, 1.989387, 1.096240, 1.646904, -4.098421, 0.545672, 1.364193, 1.341040, 1.556323, 5.145891, 2.229922, -1.214043, 1.089752, -1.778068, -2.560244, 2.266327, 1.981164, 2.707444, 2.781528, 0.979474, 0.972002, -0.075105, 0.934945, -0.866869, 3.448736, -1.561079, 1.061687, 2.550811, -0.469708, 4.136203, 3.849202, -1.115006, -1.362522, -0.477934, -0.217236, 3.059588, -1.913679, -0.223996, -2.238563, -0.246944, -2.901762, 2.800288, 0.591907, 2.385238, 1.304711, 1.502628, 0.314458, -0.560291, 2.228894, 4.770694, 0.590195, 1.130904, -0.154598, -2.602068, 2.284669, -2.444100, 2.262833, 2.987651, -1.718937, 0.172004, -4.404370, -0.713307, 3.951679, 1.743177, 2.184051, 2.084052, -1.200920, 3.804940, 1.190594, 0.423731, 0.387230, -2.698928, 4.313253, -1.562275, 3.057487, 1.332809, 3.277320, -0.025386, -7.029213, 2.271002, -1.368629, 1.512957, -1.627040, -1.321970, 1.505093, 2.593931, 2.138204, -0.703714, -2.467005, 2.581830, 0.924391, -0.011639, 0.399110, -1.663732, -1.625977, -2.453845, 1.651928, 2.514879, -1.845028, -0.415811, -2.509327, 2.051333, 1.113616, 0.354290, -3.648528, 2.132520, 1.173429, -1.130877, 5.016101, -0.490695, 3.052016, -3.462008, -1.558317, 2.028475, 2.340829, -2.054028, -2.028140, -1.076622, -3.786443, 2.211594, 0.838984, -4.107341, 1.741582, 2.438528, -1.545592, 0.031897, -4.673401, -1.738998, -0.626893, 1.342387, -0.145155, -1.596622, 2.264874, -2.561344, -1.419518, -2.779258, 3.840589, -2.814966, -1.121065, 3.103562, -1.434957, 1.904631, -0.152767, 3.093784, -0.006062, 1.727092, 3.195707, 1.766008, 2.366390, -2.307591, 2.373812, -0.476952, 1.527025, -0.465218, 2.363796, 1.407915, 2.833245, -2.019148, -0.643388, 2.679688, 5.222930, -1.854012, 2.572557, 1.569281, -0.477092, 1.902260, 3.180279, 6.306729, -0.274454, 0.718802, -2.418252, 4.774065, -1.660186, 0.724020, 1.382207, -0.139734, -1.164008, 4.906361, -2.018308, 3.283710, 0.376118, -0.720316, -4.012407, 0.609760, -0.314082, 1.398531, -0.268018, 1.786567, -2.602562, -0.330188, -4.301880, 0.335341, 4.711812, 0.695186, -2.297723, 0.472864, 0.569482, 1.624462, 2.681783, 3.021969, -0.104063, -1.222748, -3.950817, -0.500057, -0.442732, 1.211303, 1.949326, 1.100488, -5.104456, -1.600076, 1.789613, 0.082084, -0.346776, 0.059769, 1.983953, 2.033277, 0.305617, 2.394171, 1.896901, 6.016169, 0.465130, 2.148189, 
-3.048455, -1.015694, -0.439929, -4.321608, 2.767549, -2.717652, 0.469900, -0.449218, -1.194940, 0.407131, 0.130723, -0.135469, 2.412801, -0.450518, -1.383515, 0.950040, -1.357381, 3.228314, 3.361074, 0.332888, -5.870872, -2.020130, -0.178779, -1.861960, 2.273883, 0.218612, -2.976734, -1.733326, 1.392309, 1.946056, 0.799269, 1.524984, 2.629119, 1.725265, 1.525396, -2.106735, 2.488540, -3.239527, 0.826539, 0.666062, -1.867959, 0.354207, -1.698327, -2.927954, 2.341576, 0.872006, -2.815492, 3.722144, 2.325369, 1.456939, 2.497020, 1.038249, -1.285768, -1.675847, 0.893679, -1.395089, 0.595987, -0.750167, -0.563275, 1.091686, -2.418337, -1.896940, -1.411161, -0.380414, -0.252327, 0.983415, 3.090965, -3.424773, 1.611721, -1.380765, 2.076887, -1.467507, 1.811858, -3.494227, -3.977528, 1.935094, 0.442732, 4.073760, 0.244701, 3.578159, -1.013995, -2.507242, 4.465893, 1.057177, -0.537271, -4.829791, 0.020822, 1.384580, -0.917904, -0.844748, -2.344644, -0.291398, -1.531452, -0.735474, -2.581677, 1.842135, -0.054317, 2.477489, 0.283114, 1.382037, 1.793713, 2.417314, -3.699762, -2.272878, -3.177811, -0.718206, 1.148291, -1.529859, -0.163814, 2.781793, -2.223887, 3.794726, -1.517594, -1.921671, 3.090774, 0.501923, -1.972393, -1.791274, -2.481785, 0.448921, -2.090209, 0.220071, -1.382417, -0.468002, -3.140492, 0.560268, -3.443594, 0.177877, -1.951589, -2.809496, 0.517052, -1.871615, -2.020593, -2.291572, -1.610467, -0.813363, -3.394322, -2.246128, 1.871319, -3.381363, -3.987760, -2.516682, 0.588713, 3.890212, 2.469473, 0.362648, 0.561302, -1.753007, -1.627349, -0.349749, 2.216681, 0.383506, -2.713041, -0.593570, 3.251638, 2.689493, -3.748345, 0.691237, -0.569738, -5.734797, 5.413714, 2.996217, -3.365990, 1.122713, -0.547549, -2.552277, 3.383361, 5.260242, 2.122134, -0.637885, 2.626056, 1.089457, -2.199468, -2.468286, -0.702738, -0.091756, 0.488842, 0.760422, 0.146176, -1.661150, 2.917534, -1.514377, -0.663864, 2.496569, 1.366415, -2.151349, 1.885142, -0.772022, 2.252527, 2.092843, -2.031601, -0.813497, 0.407781, -3.519864, 1.166523, 1.347374, -2.144313, 1.894274, 2.548582, 2.886257, -2.012259, -0.423815, -1.669818, 4.414027, -2.174315, -0.024239, 3.345891, -3.157781, 3.547366, 3.083466, -2.391912, -2.350487, 1.912315, 2.167852, -1.419118, -0.530295, 1.211723, 2.307965, -3.893567, -1.757907, -2.166578, -0.312290, -2.318677, 2.761449, -2.065255, -2.718894, -5.395889, 1.222173, -1.110870, -1.290285, -1.207875, -0.060934, -2.584382, 1.737085, 1.880211, 2.757859, -2.688734, -2.082012, 0.599087, 2.687817, -2.736868, -2.483763, 1.212993, -1.285007, -3.371017, -4.127361, -2.956272, 1.772995, -1.661328, 1.707176, 0.474840, -0.346689, 1.808019, -1.051578, -3.156462, -3.182742, -2.470137, -0.530100, 1.256876, -3.449591, 1.078454, 3.284560, -1.910391, -0.292548, -1.595873, -0.838040, -0.740394, 1.511373, -3.185318, 0.501699, 2.445569, 1.670159, -2.597500, 3.368602, 4.472101, -4.204967, 4.533583, 0.047076, -3.118933, 1.523812, -1.076397, -3.584929, 1.941660, -2.212678, -0.267279, -2.351005, 3.205257, -1.552729, -2.789754, 2.306149, -4.425440, 2.926135, -1.727820, 4.657523, -3.581824, 1.874127, 2.977703, 3.189226, -1.316201, -2.322132, 0.326548, -2.243823, -0.581202, -2.148850, 1.752387, 3.638435, 1.993092, 1.322640, 3.349422, -4.158694, 2.212160, 3.103670, 2.673969, -1.280546, 1.500715, 2.859776, -0.207381, 4.820920, 2.770439, 1.037140, 2.432384, 1.314973, -3.747844, -0.571894, 0.404972, -0.784428, -4.287580, -0.850005, -0.596455, 3.271060, -2.868608, 3.010384, 2.683059, 4.961196, -1.193018, -2.061228, 0.384417, 
0.839436, 1.829330, 1.032372, -2.438330, 0.873286, -5.543126, -1.238505, 4.828424, 4.276352, 2.086277, 1.759339, 3.404096, 1.391880, -2.645986, 1.286137, 2.212531, 2.400476, -1.635331, -1.826915, 1.386136, 0.734565, 3.339298, -2.710637, -3.658201, -0.764404, 3.978921, 1.805965, 0.612052, -0.069873, 1.873704, -0.291247, 0.808841, 1.046961, -2.339138, 2.548506, 0.116501, -1.358457, -2.428479, 0.405364, 0.049829, 2.078218, 0.630792, 2.682093, -0.111304, -1.323117, -2.635784, 1.362048, 3.085995, 2.625044, 1.898022, 2.371150, -0.770906, -0.275337, -1.812634, 0.451130, 0.030746, 1.921646, 2.202017, -0.176585, 0.895559, -0.876122, -3.598070, 0.757027, 0.715836, -3.268029, -0.826421, 1.655850, -0.267985, 0.701595, 0.129097, 1.087256, -2.069307, -1.865456, 1.049002, 1.045302, 1.117406, -0.491220, 1.752171, 0.180681, 3.506818, 1.705417, -3.404651, 2.519688, 2.092036, 2.918577, -1.644121, -0.248030, 0.368383, 1.397353, 0.468383, 2.009109, 2.615247, 1.785939, -2.678337, 1.617231, -1.236114, -0.461577, -1.838622, 1.968311, 1.328617, -2.911062, -1.685913, -1.801324, 1.286906, -4.115755, -2.113906, 9.896759, 0.374081, -0.388752, -1.223068, -3.063830, 2.229397, 2.612700, -1.033353, -1.022675, 3.386715, -1.121697, 1.809654, -2.561433, -2.942688, 2.570541, 2.310937, -1.491891, 0.108844, 1.215748, -1.947517, 0.629480, 0.692682, -0.667800, -2.369210, 1.485672, 0.038969, 0.029837, -0.422000, -0.252593, -0.688025, -1.452450, 4.435425, 3.307030, -1.265886, 6.128372, 1.978850, -3.929060, -0.298456, 2.120240, 3.654680, 1.866614, 2.054912, -1.318845, 1.181852, -0.038600, 3.942681, 4.401767, -1.473351, -0.744864, -2.225775, -0.871413, 1.849269, -1.780350, 2.124613, 2.299182, -0.689189, -0.497090, 0.518188, -2.052712, 3.142647, -2.284230, 4.709138, -0.259247, -0.405531, 1.163305, 1.855179, -0.130810, 2.581027, -3.659903, 2.676016, 1.587614, -2.458660, 1.384374, -0.823230, -1.355044, -2.172476, 1.655872, -0.893984, -2.042878, 1.969854, 0.140095, -5.839048, -1.551351, 0.726457, 0.911761, -0.065500, 0.825390, 1.000783, 2.737443, -4.026385, 3.902370, -1.769917, -0.966249, -0.816267, -0.570867, 1.182167, -3.117018, -2.331257, -2.639359, -3.660809, 1.104480, 1.201332, 0.084591, -0.062481, -1.295772, 1.735669, -0.666152, -0.181148, -0.707571, 6.112964, -5.684958, -1.611841, 0.320344, 1.758454, -2.201783, -0.981238, -2.890517, -4.545226, -2.521300, 0.036706, 0.128801, -2.098386, 2.333826, 2.365282, -2.611004, 2.553960, -1.038355, -5.702994, -2.997832, -2.794518, -2.389665, 2.133088, -1.820822, -2.890388, 2.260902, 3.086394, 0.570292, 1.511309, -3.697427, -0.810149, -0.843711, -1.795822, 0.495115, 2.823005, 1.649202, 8.319246, -0.411636, -2.392526, 2.539201, 2.370214, 4.756253, 0.828395, -1.034127, -2.109726, -1.572361, 5.465284, -3.657965, 4.423150, 2.149361, -7.255781, 3.108456, -2.962606, 2.280480, 1.510164, -0.990311, -3.053618, 2.630822, -1.767082, -0.539244, -2.688886, -1.095631, -0.003539, -0.584830, 0.941815, 1.863402, -0.325740, -0.093675, -0.985750, -2.204835, -2.334163, 0.659979, -1.940781, 0.892017, -0.047762, -0.801082, 5.068970, -2.416367, -0.054956, -0.609716, -0.925279, 3.985359, -2.428799, 1.385789, -2.310544, -0.390019, -3.638839, 1.571243, 0.829102, -1.987846, -1.974930, 0.148836, -2.629555, -1.189142, -1.211454, 1.648244, 1.809920, 0.444209, 0.857393, -0.552437, -1.938646, -1.704941, 1.153294, -7.150531, 2.521330, -4.333313, -1.203517, -6.791515, 5.610275, 2.526631, -2.193799, 0.316134, -2.975452, 0.043736, -2.145086, 0.947292, -1.770846, -1.070288, -1.937447, 2.345026, -3.003102, -0.961535, 4.269505, 
-1.473615, -2.028548, -1.439545, 3.920681, 2.086540, -0.087132, 2.692583, -1.969973, 0.640000, 0.759344, -0.851917, -2.110132, -1.116281, 1.290830, -0.036435, 1.081315, 3.280384, 1.031457, -3.773770, 2.426805, -2.144011, 2.387061, -1.523329, 1.364333, 0.632966, 1.254096, 0.397885, 1.691442, -0.060497, -2.736957, -0.955565, -1.479424, 1.189433, 4.488828, -1.039956, 4.110622, -4.271568, 3.211975, -0.931945, 2.384370, 1.593602, -0.530324, -1.894910, 0.872313, 0.775410, 0.611552, 3.815946, 5.441214, 2.328136, -3.694026, 0.856822, -0.358736, 1.209829, 1.578911, -1.869051, 2.328872, 1.902640, 0.171978, 1.517722, 0.237618, -2.074708, 3.189997, -0.351501, -3.595257, -3.138206, 0.722451, -0.521091, 2.513267, 0.863403, -3.036732, 0.966440, 0.639682, 3.953517, 1.480138, 2.872857, -1.725542, -0.765049, -3.056712, 0.961703, 1.613330, -3.012677, 1.984263, 0.603873, -0.146085, 0.401721, -1.095984, 1.858853, -2.949477, -0.214432, 0.300439, -1.510662, -1.520183, 3.227612, 3.277660, -0.816754, -1.828793, 1.171779, 2.029778, 1.869878, -2.729131, -2.555247, -3.633353, -3.591051, -1.858859, 2.480379, 0.767383, 3.103071, -1.344633, 2.634771, 4.324559, 0.995620, 1.743588, -4.545441, 3.179331, 2.600043, -1.224859, -0.539547, -2.826778, 2.330503, -2.919528, -2.707401, 3.279189, 0.743885, -1.030735, 1.944464, 1.497105, -0.072477, 9.878094, 0.991437, 1.252940, 0.908459, 2.009145, 2.427585, -2.123645, -2.284483, -0.918611, 2.746569, -2.033646, 0.837004, -1.525458, 0.870853, -3.641463, -2.634501, -0.521927, 0.874221, -1.667083, 1.903460, 3.153025, -1.037110, 2.270567, 1.085054, 0.564745, 1.921359, 2.154752, -1.015654, 2.087633, 3.454697, 1.214256, -1.918541, -1.607991, 1.002951, 1.865858, -0.964412, 2.511755, -2.399036, 1.691220, 2.473380, 0.841120, 6.731910, 2.276564, -0.619042, 0.109877, -1.801440, 2.544270, -3.653917, -0.909440, -1.673509, -3.541651, -1.475499, 0.974824, 1.341343, 3.371499, 2.327110, -4.265070, 2.730826, 6.252456, 0.442818, -2.496783, 0.133818, -1.569381, 1.881387, -1.776341, 0.944535, 1.976379, 1.078518, 0.454756, -0.553306, 1.956321, -2.193647, -3.906279, 0.997558, -2.133405, 0.813385, 0.066720, 0.675073, -1.305464, 1.049415, 0.058389, -3.035835, 0.971060, 1.673939, -1.002198, -2.784204, 1.691542, -22.129820, 0.332251, 1.568601, -1.923303, 0.294346, 1.423302, -2.935637, -1.775013, -3.467926, 2.910514, -1.682199, 1.219355, 0.793816, 2.940609, 2.325723, 1.023719, -2.592135, -1.507832, -1.641595, 1.456100, -1.044729, 4.106136, 2.221826, 2.339298, 3.302754, 1.938588, 0.728833, -1.555544, 2.474434, -0.518467, 0.533135, 3.680743, 1.677448, 2.104536, 0.646286, 0.555699, 2.477109, -3.550225, -3.647983, -2.079800, 4.112247, 2.804218, -0.919420, 1.293995, -3.565257, 0.565476, -1.367818, -1.903385, -1.448610, 0.866282, 2.322896, 1.147820, -0.627827, -1.059485, 4.482340, -2.255801, -6.158365, -1.201316, -2.617374, 0.849946, -3.063239, -0.336349, 3.439678, 1.400132, 0.616048, -2.214755, -0.108316, 1.013055, 1.614219, -2.119207, 2.570008, 1.538140, -1.337971, -2.840325, -0.002898, -3.976804, -0.981789, -2.240550, -1.108801, 2.162783, -0.593558, 0.575897, 2.254664, -1.405173, 1.071949, 2.099885, -1.849970, -0.874483, 2.513324, 2.759261, -1.591699, -2.328701, 1.031207, -1.108524, -12.576934, -0.212726, -0.139737, 1.239497, -1.242406, 1.892567, -0.104317, -1.160928, -0.283431, -1.366523, -0.203520, -0.691903, 1.787807, 0.201596, -4.096425, 2.446693, -3.331512, 2.033213, -3.020568, -1.573980, -0.388636, -1.998314, -1.966732, 1.677706, 3.899530, 2.133426, 0.836722, 0.754416, -0.003854, 0.806098, -0.711075, 
-0.452569, 0.355189, 1.279910, -1.450249, 1.334980, -0.518858, -3.259822, 3.693094, -2.071981, 0.501570, 0.056253, -2.921665, -0.392897, -0.867844, -0.986845, -1.664861, 1.737591, -3.061367, 0.302886, 2.094111, 0.045325, -1.595525, 0.674877, -1.471849, -3.893967, 1.412523, 1.773032, -2.750422, 1.035602, -0.516791, 0.286685, -0.128167, -3.022458, 0.957886, -0.954299, -2.147751, -0.348507, 1.870028, 4.274771, 3.953058, 1.525810, 4.137014, -2.523398, 4.372852, 0.338937, -0.623839, -1.676739, 9.756101, -3.779910, -1.706488, -2.624722, 1.853259, -1.887885, 0.109111, 2.570731, 1.300253, 0.520757, 0.706603, -4.339977, -5.510975, -3.262259, 0.894060, 0.797520, -1.387151, 1.952152, 3.178773, -0.055920, -0.988293, -2.944663, 0.906312, 1.589234, -1.910973, 1.550346, -3.043045, 1.202343, 2.156839, -3.822891, 1.310066, 0.109305, 0.393427, 0.848353, -0.702017, 1.135580, -0.544394, 0.898604, 2.183110, 2.875517, -2.742109, -2.914018, 2.957861, 1.904441, 0.188540, 3.428993, 4.164488, -3.325378, -0.339448, 1.521152, -0.671186, 0.412715, -0.244523, -0.876270, 1.905240, 1.004341, 1.861009, 3.023627, 1.829536, -0.710838, 2.065416, 3.122601, 2.638688, -2.839584, 2.718982, -0.912577, 3.302638, 2.473169, 0.684764, 1.555684, -1.825787, -1.142594, -0.022323, 3.818839, -3.767341, -2.613019, 0.704796, 2.798040, -5.006337, -0.892998, 2.696245, -2.926975, 2.355693, -0.578362, -1.544222, 3.422838, -2.041148, -0.578029, 0.011704, 1.837247, -1.490923, 2.378649, -2.392335, 3.045223, -3.292506, 0.487439, 2.134249, -0.827777, 0.996064, -1.616960, -0.817308, 1.334069, 3.380960, 2.541771, -0.265344, -0.917402, 2.407108, 1.253521, -1.562716, -1.517113, 2.796017, 0.775317, 0.575597, 1.323530, 1.529689, -16.116417, -2.998111, 0.450969, -1.814111, 0.349480, 2.782877, 1.295974, 1.560289, 1.718585, 1.898549, 2.660482, 0.864414, -2.238849, 0.737516, -2.422750, 0.247123, -2.743533, 4.227241, 0.526669, 0.501835, -2.821635, -4.300069, 1.813544, -1.345552, -3.812800, -0.391366, -1.456246, 2.738957, 1.825692, -2.190766, -3.353376, -1.940057, -0.862514, 0.672473, -1.073232, -4.133321, -0.304357, -2.691906, 4.606014, 2.997205, 1.211570, -1.067043, 0.287098, -2.027639, -2.065545, 3.585075, -1.943740, 1.839725, 0.222802, -4.045444, 0.650917, 0.830172, -2.850742, -2.450385, -0.215431, 1.970153, 3.219507, 0.619687, -0.603187, -2.238926, 2.604592, -1.533692, 1.733842, 1.440334, 0.821643, 1.787324, -3.166633, 0.036767, 5.421640, 1.393930, 3.653860, -1.986755, -3.910746, 1.559711, 0.495545, -0.567174, 3.627657, 0.452299, 0.548954, 4.050268, -3.818210, -0.528180, 1.463299, 1.876337, -3.187659, 0.062530, -3.048895, -0.426209, 1.873657, -0.866349, -2.653108, 3.005883, -3.116135, 1.542959, 0.393652, 2.553407, 0.303429, 2.347852, 1.130731, -1.145539, -0.604549, 0.910890, -2.142335, -0.729891, -2.702992, -1.919118, 0.967466, -2.112257, 2.792009, 0.179999, 2.758096, -1.864986, -3.021833, -0.889339, 1.378984, -0.177384, -1.409245, 1.099375, -0.886126, 1.222357, -1.046859, -1.496866, -1.473668, -1.838376, -2.917238, 0.983777, 2.499777, -1.375984, 2.721064, -2.649018, 1.615053, 0.037384, -2.364583, -0.474144, -1.747913, -2.922879, 0.293949, -1.100694, 1.720940, 2.906822, 0.333392, 3.154211, -2.480198, 1.405208, 0.162729, 1.072986, 1.403922, -2.553919, -1.325863, -0.578505, 1.225022, 1.082428, -1.728628, 2.811140, 3.167954, 2.595941, -0.468482, 0.676855, -3.399291, -2.395138, 2.171390, 5.313373, 0.593171, 1.174135, 1.041957, -3.592760, -1.115999, -1.609447, 0.996160, -4.086498, 1.880493, 2.586645, -1.809857, 1.197866, 3.047547, 2.049238, -5.638014, 3.052633, 
1.992575, -0.746054, 2.904683, -0.634189, -1.680446, 6.441619, 0.607398, 1.153746, 0.197727, -0.397482, 3.676062, 4.964014, 3.061106, -1.133064, 0.068444, 0.732943, 1.284233, 0.121063, -2.615230, -0.237515, -0.004988, -0.984315, -0.205606, 0.886206, 2.857014, -1.104957, -0.110875, -1.748340, -1.275044, 0.566729, 2.455837, 0.216097, 1.074522, -1.081340, 1.013576, -0.510011, 0.004325, -1.918031, 2.226862, -1.485101, 1.199568, 3.146095, 2.427711, -3.011320, -2.449924, 1.329107, -1.326373, 0.970822, 0.849875, -1.601499, 2.314740, -3.488747, 1.154833, 0.087346, 0.803074, -0.903432, -3.312709, 2.299358, 2.803974, -3.286255, -1.573510, 2.070856, -0.101941, 2.171456, -1.329902, -3.907687, -3.720195, -2.571179, 2.813491, 1.830530, -2.511291, 1.457561, -2.576567, -2.362876, -1.616081, 1.032160, -1.181725, -0.276253, 2.621583, -0.266221, -0.139193, 0.748835, 1.204025, -1.065338, 0.172388, -0.772580, 1.074110, 1.518136, 1.267711, -1.981061, 1.981899, 2.800466, 2.231948, 0.890822, 1.847108, -2.885270, 1.300715, 4.157722, -3.683444, -3.738341, -1.370671, -0.604141, -1.896416, -3.395854, 5.339676, -3.188531, -1.775950, -1.851381, 1.067175, -1.844483, -1.300046, 3.536031, 1.521053, -3.789303, 0.140510, 1.047382, -0.060062, -2.875290, -1.034876, -1.860420, -3.490496, 0.724952, 0.459753, 2.903262, 1.382170, -1.717902, -1.420321, 3.068552, -2.760029, 0.564609, 1.620253, 0.562308, 0.044785, 0.964442, -2.613660, -1.785604, -0.199799, 1.718101, 1.127491, 2.559109, 0.299823, 0.551968, 2.438937, 0.701297, -0.019573, -2.164789, 1.922659, -0.866623, 0.326152, 2.629571, -0.972274, -4.290656, 4.196831, -3.415634, 0.356076, 3.115250, 1.733862, 1.982695, -3.026798, -2.023067, 2.122068, -2.348504, 0.413962, 0.148813, -3.872636, -1.810721, 0.762815, 0.159291, -0.137686, 2.732738, -1.524898, 3.947837, 2.365946, -2.701674, 1.253832, 0.194691, -0.156369, -3.185858, -1.945729, 2.551816, 3.177832, -0.077237, -2.811372, 3.560195, -1.534038, -1.941651, -1.060778, -1.237860, -2.827729, 1.169655, -1.817541, 1.247796, -2.044961, -2.042051, -0.980262, 0.946876, 1.271373, 3.471315, -2.489898, 2.386957, 0.589401, 0.960726, 4.046332, -0.806419, -0.904894, 3.152041, -5.319210, 2.374078, 4.134824, 1.937350, -0.497493, 2.662346, 0.755571, 4.101086, 0.119435, -2.297841, 1.126628, 1.798876, 1.621213, 3.012361, 0.051148, -2.327565, 0.777902, -2.543133, 0.416235, 1.116755, 3.242164, 1.235808, 0.284160, -1.247335, 1.107888, -1.079982, 4.659342, 2.768052, 1.012518, -3.917892, 0.223945, 0.545874, 4.944964, 2.193990, 3.037240, 1.796157, -1.105980, -2.231532, 2.658256, 2.005485, -3.134518, -1.135793, 0.886341, 2.554361, -2.021382, -2.364516, -0.521016, 1.373525, 1.369994, 1.462440, -0.309788, -2.899583, -3.485990, -0.327034, -0.483858, -2.365628, 0.571344, -3.879119, -0.263073, -0.500833, 0.611228, -1.481678, -2.642653, 1.774129, -3.965195, 2.924938, 0.561954, 2.093593, 2.242171, -3.799051, -0.805658, -2.153137, -0.395024, -3.239681, 2.431444, 0.556913, -3.113872, -2.407112, 0.681333, 1.893931, -2.904344, -0.771003, -1.218895, -1.536474, 2.928406, -4.104526, 1.685589, -1.799101, 2.594079, 1.383285, -3.314305, 3.277896, -3.116689, -0.780264, 2.376668, 2.787229, 1.384615, -2.993711, -3.415761, 3.395609, -1.612519, 5.627576, 3.240973, 0.276432, -2.974000, 5.787770, 2.183374, -0.118661, 2.462688, -1.815760, 3.092844, -1.904415, -1.680902, 3.111355, 2.789873, -3.434824, -2.328745, 4.971395, -0.408980, -1.288155, 1.367070, 0.403338, 0.400134, 1.119727, -0.911876, -1.399861, -3.973023, -2.791090, 3.630412, -1.710031, 2.134027, -0.959766, -0.329367, 
-0.732914, -0.993124, -2.181825, -1.024484, -1.577234, 1.415553, 1.184569, 0.033538, -0.182342, -1.553549, 0.714704, -2.457281, -1.513178, 1.668061, -1.185722, -0.021128, 0.509903, 0.016285, 2.772515, 3.466581, 2.642076, 2.103426, -0.777547, -3.492056, 0.086423, -1.734952, -2.397410, 0.079009, -0.129437, 1.052238, -0.897817, -1.193079, -2.432753, -0.610183, 0.759082, 2.856251, 0.527911, 2.582767, -1.680882, -0.903450, 0.290197, 1.900693, 3.820286, -1.310768, -2.894516, 0.749423, -2.294792, 1.678832, -1.254640, -2.198882, -1.182828, 2.383785, -2.344207, -3.577337, 1.876101, -3.005041, 3.585013, -0.243899, -2.782204, -0.227220, 0.830313, -0.217250, -2.924293, 1.900393, 1.335294, -0.480179, -2.973958, -3.055072, 0.840056, 3.724781, -0.353757, 0.307534, 0.540906, -2.654098, -0.348129, -0.920658, 2.132657, -2.957996, -3.357490, -1.639126, 0.272579, -1.003739, 1.129556, -0.414330, -1.633175, -2.019671, -1.955855, 1.487665, -1.117603, -2.708709, -0.485532, 1.924051, 2.907635, -4.480110, -1.866212, 1.161165, 1.436610, 0.408536, 0.190726, 1.837151, 3.994542, -1.868298, 0.735382, 3.357700, -3.131468, 1.535451, -2.268453, 1.079755, 1.087334, 2.349835, -2.999949, 0.427312, -2.889404, 1.677031, -0.963640, -0.438927, 3.972819, -0.491260, 3.205900, 2.318801, 3.833720, 1.883632, 0.465372, -2.379404, 2.530885, -2.490902, -1.817383, 2.394443, 2.693515, -2.303585, 2.991621, -0.318521, -2.124806, -1.568704, -1.105953, 1.689304, -1.503239, -0.745483, 1.219584, -0.008324, -2.262623, 2.536577, 0.300485, -0.126827, 0.379301, -2.164514, -1.050668, 2.279346, -0.929581, -2.010704, -1.761641, 0.769421, 5.870451, 2.409795, 1.549106, 0.406771, -2.387406, 2.617296, 2.226446, -2.372685, -2.909598, -4.472449, -4.199341, 0.783240, 0.096103, 0.230260, 2.392704, 0.130306, -1.671189, 0.000460, 0.859836, -2.856006, 0.879900, 2.923421, -3.459907, -2.481248, -0.380551, -2.122747, 2.509279, 1.280891, 1.923709, 0.026194, 0.975075, 0.535158, 5.102134, -3.227482, -0.724829, -0.011868, -2.003006, 0.030337, 1.065228, -0.093162, 4.732137, -2.835727, 2.164323, -3.163384, 1.464387, -9.556238, 3.294225, -1.643234, -0.544956, 1.026036, 1.801332, 0.531682, 0.875548, 2.275990, -5.821035, -2.165171, -2.959899, -3.164179, -0.533030, 0.882995, -3.908379, -1.524249, -3.931026, -1.867308, -7.501748, -3.151053, -3.374834, -1.482490, -0.023572, 4.518243, 0.564633, 4.339701, 3.183874, 4.368966, -1.674653, -1.995406, -1.902929, 2.051385, -1.286106, 2.168142, 1.894793, 3.113796, 16.570700, -2.124722, 0.712696, -8.329745, 1.154208, -1.029663, -1.493598, 0.788374, 0.373883, 2.354367, -2.372883, 2.818204, -3.362396, 2.617382, 4.474661, -0.396233, -1.267438, 2.482839, 0.489908, 2.244136, 1.239966, 4.426870, 5.876467, 5.865771, -2.613004, -0.572432, -0.640416, 1.465602, 0.965603, 1.523670, 3.018555, 2.185856, -0.685083, 1.380377, -1.892750, -3.034616, -1.156111, 0.307927, 4.088897, -0.267865, -0.748823, 2.313815, 1.472056, 3.863946, -1.217377, 0.140903, 2.896098, -1.744379, -1.723044, -0.629762, -0.662425, -2.445518, 0.152551, 1.374967, -1.386010, -2.313100, -4.004970, 1.415225, -4.312975, -1.410863, -2.737320, 1.355401, 0.709947, 1.922767, -0.346310, 0.500168, -0.085046, -3.296778, 1.980763, -1.139568, 1.725973, 3.104150, 2.869179, 0.393733, -0.193732, 3.668149, -2.017278, 0.133616, -0.618817, -1.574684, -3.663116, 0.169122, -0.268842, -1.601942, 0.325557, 1.805874, 2.177101, -2.602196, -1.913941, 0.011868, -0.395234, -0.834341, 1.420007, 1.540699, 3.013688, 1.357066, 1.180240, 1.042086, 2.019283, -2.652542, -2.059645, 0.797553, -0.451454, 2.375256, 
2.512674, 1.454196, -2.217665, 0.813907, 1.702392, -0.050121, -6.994725, 5.303644, 1.784331, 0.242761, 1.062950, 3.647518, 2.466606, 0.988599, -0.931017, 2.402037, -1.814979, -0.855702, -3.275341, -1.074031, -2.235515, -1.613209, -1.744707, -1.840088, -0.779895, -4.490575, 0.831230, -0.156800, -2.528859, -3.127295, 2.660365, 1.944716, -0.364012, -1.861313, 3.323568, -0.811558, 0.169003, 2.853401, 3.639811, -0.011596, 1.986325, -1.664246, -4.615153, 2.271653, 1.025806, -3.154962, -2.319878, 2.225085, 0.531775, 0.322223, 1.468869, -0.566937, 1.309493, -1.211456, -3.419137, -1.585997, 1.178995, 0.171657, 3.465562, -0.804610, 1.665783, -5.088279, 0.650534, 1.791170, 17.250408, -1.626894, -1.974006, -0.824130, -3.331557, -3.880378, 0.599182, -0.694505, 4.215900, -1.447300, 2.230931, -0.596584, 0.340238, 4.081385, 3.625847, 2.839355, 3.756844, -2.777805, 0.100937, -1.989295, 3.277542, -4.379092, 3.087123, 1.403637, -2.231328, 2.390607, 1.557257, 3.298607, -0.299247, -1.181531, 0.525384, -0.890912, 1.824297, -3.995151, -5.042112, 0.744427, -3.067411, 1.595778, -2.571892, 0.771376, -3.277676, -1.455596, -0.250049, -1.351680, 0.697943, 1.069591, 2.857038, -0.159182, -0.362470, 1.539649, 0.099324, -0.135235, 1.614402, -1.260010, -0.059322, 0.792264, -3.511580, -0.926815, -0.584613, 3.695359, -1.960196, 3.956642, -2.118308, 1.945329, -3.706782, -4.705505, -1.869684, 1.638058, -4.319848, 0.475216, 3.160248, 1.229439, 5.102840, 1.172704, 0.917782, 2.616993, 2.139804, 0.179050, 0.635365, -2.586541, 3.264217, 2.315623, -0.899583, -0.615601, -2.657061, 1.547319, -2.286578, 1.879086, 1.495636, 2.321740, -0.555475, -1.915786, 1.741425, 0.884898, 2.387746, 2.810685, -1.226168, 0.692660, 2.960999, 3.579106, 2.618080, 2.850123, 3.143281, -1.445385, -0.869172, 2.514671, -2.716269, -1.470776, 1.258983, -3.679865, -2.217870, 2.349101, -1.496878, -3.025600, -4.349153, -1.707175, 3.005045, -1.947022, 0.834064, 2.545904, 0.842057, 1.460314, -0.993782, -0.211882, -3.322590, 0.401144, 2.192401, 0.763440, -2.603230, 2.001291, 0.178745, -0.747411, 2.043627, -2.383173, 1.185634, -0.005095, 2.036742, -2.243795, -0.514880, -1.657962, 0.009154, -0.846694, 3.579744, -1.727863, 1.975658, 0.298879, -1.385962, 3.904871, 0.650457, -0.503490, 1.660019, -1.920314, 1.000721, 2.794953, 0.703100, 1.590531, -3.563776, -2.024322, 0.831120, -0.502338, 1.180401, -1.431693, -3.374807, 1.813871, 0.377378, 2.058053, -2.708455, -0.701740, 0.503954, 3.542927, 1.265793, -2.440325, 5.357823, 1.181739, 1.192558, 0.281461, 2.353562, -0.804080, -1.500842, 0.188305, -3.794515, -1.650937, 1.416369, -2.651186, 3.189369, 1.791879, -2.396369, -4.074958, -2.053515, -2.250571, -4.064375, 0.532033, 1.440506, -1.240853, 2.492684, -2.390124, -1.024883, -0.555563, -1.956213, 1.623435, 2.391842, 3.169787, -2.010656, -2.048950, -0.517389, 3.292646, 2.329223, -2.762945, -1.225980, -3.282871, -0.160141, -1.514427, 1.987719, 0.649399, 0.815709, -2.400442, -2.567317, -1.862416, -1.602954, 4.048628, 1.432278, 1.482120, -0.028168, -1.211920, -1.063073, 0.239240, -0.337652, 2.459109, 0.510653, -1.598884, 2.341745, -3.938889, 0.773527, 1.360329, 3.298347, 3.038607, -1.644866, -0.088439, 1.705638, -1.667633, 3.251128, -0.499025, -0.244674, -2.124416, 1.485698, 1.383972, -0.360520, -1.537704, 0.886796, 1.275228, 1.144015, -0.614845, 3.147204, -2.104358, -1.287056, -1.382660, 1.712896, -1.635278, -2.644013, -0.343970, 1.227356, -0.518890, -3.459199, -1.418720, 0.039872, -2.601501, -1.806357, -1.778686, 2.005520, -1.855919, 0.793212, -1.463252, 1.586207, -0.184600, 
1.077291, -3.230673, -1.065376, -2.426338, -4.433507, 3.362201, -2.072861, 4.426600, -1.687956, -0.826343, -0.115859, -2.048442, 0.140565, 4.825446, -1.373407, -0.139355, -0.565521, -3.447233, 2.419903, -1.851389, -3.471667, 1.108885, 3.593758, 0.237525, 2.659810, -1.035963, -2.568973, 1.054946, -1.254823, -1.618325, 0.781841, 1.970722, 3.680123, 1.463840, 0.097049, 1.916009, -0.708227, -0.067044, -1.741496, 2.598838, 3.050692, -0.787915, 2.766759, 1.675597, -1.173892, 2.223792, -1.062551, 2.273316, 3.011199, -1.650119, -2.648605, 0.158647, -2.177063, 0.186268, -3.177166, -0.526156, -1.345172, -2.188882, -0.780098, 2.949754, 2.012983, 0.683741, 1.745923, 3.283293, -2.406209, 2.281897, 1.034458, 0.070274, -3.126071, -3.453828, 0.506103, 4.258954, -0.538088, -2.218058, 3.084713, 5.720852, 2.743039, -1.810076, 6.272157, 0.664128, 3.120400, 0.742479, 3.379241, -2.731331, -0.166025, -6.347305, -2.193065, 0.273632, 0.289647, 3.115033, -2.160245, 0.387171, -0.448133, -1.982356, -4.029318, 1.811508, 3.046365, -2.964187, 2.094236, -3.426285, -3.485981, -1.995775, -1.983477, -2.634778, 0.562914, 1.780642, -2.165194, 0.609340, 0.511937, -2.450258, 3.387038, 2.438292, 1.092778, 1.200358, 2.149229, -3.404688, -3.018096, 4.586117, 1.999421, -1.138862, -1.361518, -0.968755, 1.716326, 2.557005, 1.137697, -1.860175, 4.096773, -2.173637, 0.623251, -0.005716, 0.244621, 2.366596, 3.502475, 1.700907, 1.084500, 3.542811, 0.157646, 0.709190, -1.301711, 1.101328, -1.765094, -2.921493, -0.108164, -0.767839, -1.458841, -3.287886, -2.644441, -0.287442, 0.930482, -1.790940, 2.306692, 2.406610, -3.171745, 3.804529, -2.363433, 2.127100, -0.969682, -1.289734, 0.080165, -0.030003, 3.477387, 1.284148, -0.626319, -1.433724, 1.803858, 3.479468, -0.094739, 0.400459, -7.394617, 0.835967, -2.398387, 0.867873, 2.937330, 0.751119, -1.297753, 0.425168, -1.585854, -3.647148, 3.178249, 1.690364, -1.492471, 2.513674, -1.554596, 3.298636, 0.609163, -2.723015, 2.000729, 1.757294, 0.908518, -3.078389, -2.418847, 7.257916, -1.492727, -3.191773, 2.915990, 1.081085, -1.266900, 1.227162, -3.655681, 0.880118, -0.415529, 4.598647, 0.800119, 3.403203, 1.137017, 0.396906, 0.119401, 1.960427, 3.879504, 6.221431, -3.068269, -2.315856, 0.694096, -0.145249, 0.976113, 1.862161, 0.650839, 0.130839, 0.288261, -4.273160, 0.193502, -4.425699, -2.068643, -1.223312, 1.958007, -0.721088, 1.534529, 2.555326, 2.301806, -1.540642, 2.078291, -3.631954, 4.562294, 1.094564, -0.621842, 1.441936, 2.677450, -1.552978, -0.132519, 0.788240, 1.321109, 3.038334, -5.551532, 2.999269, 0.364990, -0.272237, -2.743863, -2.561628, -1.798099, -0.921997, 2.971652, -1.706339, -1.688948, 0.070037, 3.021770, -3.601783, 2.029795, -0.783538, 1.702649, -0.212672, -1.084160, -0.049358, 5.279011, -1.903958, 2.085022, -3.444110, -1.312608, 2.920285, -1.673897, -0.170632, 0.140543, 0.217857, 0.993657, -0.147226, 4.131040, 1.917654, -0.402115, -1.762819, 4.605012, -0.253315, -0.367822, -1.636496, 2.923368, -3.571045, 1.615221, -0.274513, 1.293480, -3.736974, -2.906220, 1.564071, -1.271615, -4.054400, -0.437894, -2.497492, -4.102562, 0.335778, 4.307206, 6.871069, -1.353800, 1.235324, 1.515420, -0.432864, -0.915976, 1.982390, 1.529526, 1.321939, 0.553168, -0.996906, 1.229996, -1.395458, -0.720184, 3.174892, -0.654703, -5.431517, -2.564596, -2.954030, -1.324461, 1.512441, -2.701057, 0.388167, -2.322500, 2.430663, 1.636868, 3.658733, 0.276975, -2.526526, -2.830755, 0.008060, 2.742812, -1.348127, 1.398821, -1.734482, 0.392166, -0.090562, -1.914905, 1.780919, -2.045624, -1.376605, -1.993155, 
-0.685609, 2.082545, 2.854451, 1.075901, -1.963066, 2.440422, 0.654377, 1.624016, 0.347185, 3.520130, -3.108392, -0.240567, -3.161610, -0.800270, 0.307272, -6.462657, -2.478560, -1.957913, 1.711907, -0.733501, 0.139884, 1.806551, -1.349770, 3.346692, -7.009873, -2.372363, 0.695869, -0.527927, -0.040519, 3.153802, -0.830838, 2.495985, -0.613522],
+	"minicpm-v:latest":     [0.373405, -0.434066, 0.402574, -0.150118, -1.009159, 0.055999, 0.578607, 1.257351, -2.809614, 3.972482, -0.813682, 1.450852, 0.222104, -1.309440, 0.825239, 1.990135, 1.091002, -1.035189, -0.127123, 1.494732, 4.583488, 2.152238, 1.950469, 0.040962, -1.026761, -2.280671, -0.764667, -2.645734, 0.738428, -0.337854, -0.514919, -2.169918, -1.606695, 0.726714, 2.363379, 0.812033, -1.017132, -5.906273, -0.068586, -3.161905, -0.668377, -0.930418, 0.602283, 1.411093, 0.929184, 0.361665, -7.188779, 0.122860, -0.208246, -0.857899, -1.278283, 0.683655, -1.143659, 3.433253, 0.186680, 0.858210, 0.086462, -1.969538, -0.585241, 2.501438, 1.663121, -1.452310, -2.782187, 2.376974, 0.011909, -2.539715, 0.295956, 0.566496, -1.961629, -0.696331, 2.636359, 1.223098, -0.417314, 0.676321, 2.153070, 0.455967, 0.085189, 1.253216, -0.196767, -0.790615, 2.291733, -0.371285, -3.192962, -1.635582, 0.242907, 17.824533, -1.191271, 0.693028, 0.503808, -2.743020, -0.241273, 1.333885, 1.035344, 1.961190, -0.152041, 1.285618, 6.496006, 0.763207, 0.087968, 2.870465, 0.356444, -1.683651, 2.216422, -1.844649, -0.691816, -1.050912, -0.499342, 1.075545, -0.171938, 0.909891, 1.241483, 1.092643, -2.173166, -12.015554, -2.856367, -20.698908, -0.609858, -3.207479, -0.977120, -0.093685, 2.417937, 2.442160, 0.848334, -2.646831, -1.455137, -0.189020, 3.002412, -1.256232, -21.787876, 0.788017, -0.381844, -1.949451, -1.868758, 0.008738, 2.679282, -0.296270, -1.594275, -2.922423, 0.540774, 0.256479, 3.338656, 0.494479, -2.322596, 2.338914, 0.057826, 1.467675, -1.584482, 0.633437, -3.083071, -0.641232, -1.212952, 0.981840, 1.525307, 0.569190, -2.707587, 1.231240, 0.272207, 0.788035, 0.123544, 1.150698, 0.283472, -2.258527, -35.561195, 2.163777, 6.679248, -1.705841, -0.327031, 2.162935, 1.658646, 0.227390, 0.758502, 1.687766, 1.669509, -0.139092, -0.986089, -0.276972, 1.080841, -0.210213, -0.943579, -2.606741, -4.459008, 0.478328, -1.479767, -3.058852, -8.545797, -0.395627, -1.164287, 2.495408, -0.183550, -2.715141, -2.877285, 0.344198, 0.790495, 0.623198, -1.037656, -0.456738, 1.659658, -0.046735, 2.193197, -0.659875, 2.066572, 2.893802, 0.885306, 0.174187, 1.547745, 1.911764, -1.608837, 0.710351, -1.673273, 9.898326, -2.634362, -0.044190, 0.545811, 0.707455, 3.859440, -3.805061, 2.133735, 4.158154, -0.054382, -0.429803, 0.409739, -0.137998, 2.086717, -0.399401, 2.022264, -0.645827, 1.551178, 0.694332, 0.139730, -0.094900, -0.723024, -2.779635, -0.507423, 1.105360, -0.154632, 0.736738, 0.161376, -0.676730, 15.677238, -0.188134, 1.856025, -1.736571, -0.953508, 0.939773, 4.345387, 0.713690, -2.220577, -2.284271, 2.869463, 1.600713, -0.709076, 0.347308, -1.171167, 1.196288, -2.898170, -0.818068, -0.079971, 1.877244, -0.987878, -2.073211, 2.558323, -1.109507, -0.980287, 0.290609, -0.468093, -1.300264, -2.062796, -0.978946, 1.764146, -0.106737, -1.404154, -0.090169, 0.652458, -0.804112, -2.321134, 1.668066, 0.430334, 6.668191, -1.203562, -0.704168, -0.549548, 2.036619, 0.800234, -2.810486, 0.518899, -0.560133, 1.006556, -2.317722, -0.750900, -0.774626, -2.130997, -3.248784, 0.649599, 1.748983, -0.228464, -0.605739, 0.412901, -0.329177, -1.682082, -0.877227, 0.548816, -0.893511, 0.946567, -2.476345, 1.110094, 1.972772, 0.139028, 2.039542, 1.314990, 0.706638, 0.704419, 1.276759, 0.549634, -0.312816, -6.987369, -2.040702, 0.164418, 0.289892, -1.125816, -0.748568, 1.327388, 0.417259, -1.186356, 0.290069, 0.164743, -1.728422, 0.379547, -1.688991, -1.351178, 2.636830, -5.866957, -0.173038, -0.586063, 2.464278, 1.674203, 
-0.039280, 2.671825, -0.426402, 0.797512, 1.504823, -4.485917, 0.373612, 0.854094, 1.092571, 1.066606, -0.742925, -1.399240, -0.229709, -0.043383, 1.223743, -0.173675, -1.400634, -0.751707, -1.184195, -2.087879, -1.150729, -3.714264, -1.186671, -2.254113, 0.601697, 2.189452, -2.337301, 39.923878, 1.442919, -6.136178, 0.997319, 0.443302, 1.893847, 2.134698, -1.521084, -2.211249, -0.605163, 2.920052, -1.224859, -0.488684, 0.174961, 1.765274, 0.043963, 1.291592, -0.027254, 0.362563, 0.535911, 0.055040, 0.965669, -0.998588, -0.965053, 0.910799, -0.635001, -0.755441, -0.216526, -0.230160, 1.201640, -1.004903, 4.072732, -3.240913, -0.649104, -0.221394, -0.652424, -0.344757, -0.897428, 1.605987, 0.713921, 1.132797, 3.124265, -4.620122, -1.233402, -2.835165, -1.103005, -0.272193, 1.986733, -2.367633, 2.668632, -0.008239, -0.637520, 1.968715, 0.809258, 1.002304, 0.056953, 0.215855, -0.626684, -1.733515, -0.024491, -1.386338, 3.167961, 0.568686, 1.992621, -0.684541, -2.265276, -4.273585, 3.601405, 0.195045, 1.275817, -1.411008, -1.897714, -1.553356, 0.561319, -1.729886, -0.169032, -2.529392, -1.037613, 3.703908, 3.448970, -1.749579, -2.396593, 0.606615, -1.292753, 0.056893, -1.645130, 1.672513, 0.194815, 0.816912, 1.116137, 1.635205, -0.354506, 0.295139, -1.623365, 1.817265, -0.269048, 1.538462, -1.011906, 1.269872, 3.067373, 0.211793, -0.732102, -0.391167, -2.233507, -2.231629, -2.095895, 0.727035, 1.411009, -0.877170, 0.340414, -0.075507, 0.745245, -1.728194, -0.083872, 3.342732, 3.725381, -0.929764, -0.378220, 0.475852, 0.933637, 0.376741, -0.074854, 0.588735, -1.907476, 2.541552, -0.915148, 1.144570, 1.298011, -1.079804, 0.938594, -0.981594, -1.589298, -0.491617, 0.186467, 0.123616, -2.508697, -1.810076, -1.044597, 1.582594, 0.258120, -0.006478, -0.524830, -1.418215, -2.215704, 0.407240, 1.460952, -1.709083, -1.259769, -1.154309, 1.705392, -0.556009, 1.421965, -0.456303, -2.493070, 0.633029, -0.641055, -0.343872, 1.294190, 2.638387, -0.840192, -0.314008, 11.737316, -1.054496, -0.697600, 0.080303, 0.745963, -0.622276, -1.977137, -1.301768, 0.361449, -1.011482, 0.635370, -2.172765, 1.191688, 1.330068, -3.170839, 0.580865, 1.834052, 4.704100, 3.587384, -3.430662, 2.697701, -0.127231, -1.113158, -1.443183, -41.022354, -0.110150, -1.822315, -0.207086, -0.767085, 1.362608, 0.168367, 2.363555, -0.680236, -0.087632, 1.561876, 1.680736, 3.146271, 1.144814, -0.160254, 1.950765, -1.504552, -0.332065, -0.360760, 1.558014, 0.636103, 0.785471, -1.704202, 0.857587, 1.007262, 1.093875, 2.384038, -3.869771, 0.113459, -0.496516, 1.827181, -1.552594, -2.527205, 0.675593, 0.022145, 1.097947, 0.381638, 1.416903, -1.212677, -0.577768, -1.223361, -0.348717, -0.045128, 0.010273, -1.609390, -1.338410, -1.802680, 0.713390, 1.108162, -0.479905, -2.831465, 0.522394, -0.296082, 2.357434, 2.456294, -1.470178, -0.920917, -2.398948, -0.204510, -1.523816, 0.510649, -0.536751, 1.747033, -1.597472, -0.743485, 0.238079, -1.569961, 1.913989, 1.672282, 2.897475, 0.532682, -2.069995, -2.510436, 0.142607, 0.483538, 0.606211, -0.185770, 0.913036, -0.824144, -2.868243, -1.664191, -0.130939, -1.009496, -2.834906, -2.953358, 1.251532, -2.528788, 0.715784, 0.619456, 1.275110, -0.968184, 0.420924, 0.751423, -0.012198, -3.663082, -0.932058, 2.118438, -0.064653, -0.722701, -0.739168, -0.247635, -0.043738, -0.452323, 3.432029, -0.554806, 2.927828, -0.212855, 0.003955, -0.357271, -2.083921, -0.759885, 1.128178, 0.407497, -0.346585, 0.254764, -0.036644, 1.292589, 0.889476, 4.094336, -1.213452, -0.292327, 0.835652, -1.123920, -1.357339, 0.583533, 
-0.212889, -2.109420, -2.039322, -2.049470, 1.393434, 0.227105, -1.943285, -1.751864, 0.368406, -0.076198, -0.553293, 0.431516, 1.632055, -2.800219, 0.425088, -0.216394, -0.355467, 3.298864, -1.703126, -0.529324, -3.259636, -0.005124, 2.216942, 0.056490, 1.522583, 0.341275, 0.279314, -0.579731, 2.031629, -1.494830, 1.357635, -0.870218, 0.445755, 1.674716, 0.256306, -0.997950, 1.777514, 2.048767, 0.416811, 4.965404, 0.141465, 0.773394, 1.545753, 0.027247, -3.331308, 0.706755, -2.109440, -1.031737, 0.457077, -0.085427, -0.124898, 2.836118, 0.668133, -0.377359, 0.098304, 0.442726, 0.469875, 2.438758, -0.113918, -0.708185, 0.850951, -0.269033, 1.265060, 1.252891, -0.723365, -0.106239, -0.572127, -1.127482, 0.805131, 0.789896, -1.664021, -0.853497, -1.080191, -1.777647, -1.330180, 0.512299, -1.983276, 0.702110, -7.340586, -2.208733, -3.390751, 0.570193, -0.615102, 1.125478, 1.215835, 0.822542, 0.171175, -0.069884, -0.123099, 1.784375, -2.423002, -0.737408, 0.343006, -0.787060, 1.550688, -1.225983, -4.489320, -0.243526, -1.443461, 0.465390, 1.505774, 1.369661, -1.840881, 5.110126, 2.507883, -0.404607, 0.081059, 0.555840, -1.737800, -0.521636, 0.646257, -0.794755, 1.622429, -1.143865, 0.247397, -3.387873, -1.999178, -0.475409, 0.254099, -1.216121, 0.947898, -0.436287, 0.588959, 3.871037, -2.230380, -1.394659, 0.590486, -1.651382, -2.237124, -3.214393, 0.829730, -2.667794, -1.414842, -2.266326, -0.213557, 3.285220, 0.244696, 1.276757, -1.010511, 0.954373, 2.183401, 0.428658, 0.591693, 0.545552, -0.059087, 1.103354, 1.915429, 2.638312, 0.722118, 0.093238, 0.864807, -0.179161, 2.299291, -4.168321, -0.407841, -2.142166, -0.239048, 2.303017, -1.151473, -0.927492, -0.527071, -0.511980, -0.261069, -0.298833, 1.963749, -0.033439, -1.881962, -0.522094, -2.649604, 1.647931, 0.776876, 0.432121, -0.191135, -2.145623, -1.231426, -0.372664, -0.452278, 1.274758, 0.170175, 1.026496, 0.073504, 0.640993, -1.226367, 0.783692, -0.448901, 1.416342, -0.930619, -0.322302, -0.675186, 1.486036, -2.058913, 1.081755, 1.013078, -0.047287, -0.117357, 0.994664, 0.758084, -0.193374, 0.876904, 0.891312, 1.532211, -0.034118, 0.127642, -0.266024, -0.440258, 0.074581, 1.515373, -9.780887, 0.753208, 0.330636, 2.945389, -0.220795, 0.140223, -0.131821, -1.725950, -2.171350, -0.204008, 0.595772, -0.116645, 1.457196, 3.947017, -0.784874, -0.888277, -1.782836, 0.817804, -0.192799, 4.299161, -7.505760, 1.463305, -0.106789, 1.174378, -0.811028, 2.220318, -0.583385, -1.241430, -1.802632, -0.958205, 2.339732, 2.392081, 0.028450, 0.596011, -1.068670, -0.249349, -2.410244, -0.316100, 3.512356, 1.815074, -2.563461, -1.023207, -1.255088, 0.645512, 0.079494, -0.294559, -0.094625, 3.387850, 0.226355, -2.717346, 0.559185, 1.418335, 0.593394, -1.490641, -0.128680, 0.076089, -1.944839, -1.451210, 2.537951, 0.006042, 0.122956, 1.893836, -0.406169, 0.839640, -0.788404, 4.068511, -0.677647, 1.193237, 1.380761, -1.895364, 1.780263, -0.083210, -0.052489, -2.717657, 0.885126, 0.945189, 0.714716, -0.331114, -0.276344, -3.294456, -1.361389, -1.806947, 0.547034, 0.398358, 2.003053, 0.901725, 0.410779, -0.864764, -1.618749, -1.349252, -0.911480, 2.053375, -1.349382, 0.849684, 1.225166, -0.703413, 1.470940, -1.646121, 1.467508, 1.322925, 1.738584, -2.498531, -1.925045, -0.384096, -0.101580, -2.523852, 0.767522, 0.719983, 1.575168, -0.290012, 1.028273, 1.360790, -1.643553, 0.725088, -3.634740, -0.672812, -1.429943, -0.466053, -1.564547, 2.244541, -0.990287, 0.477694, 0.142603, -1.236258, -0.260318, 0.138124, 0.848498, 0.986988, 1.307112, 0.753924, -0.791700, 
0.239069, 1.626977, 0.944337, 2.356667, 4.252893, 1.440436, -0.834279, -3.924565, 1.486321, 1.781534, 1.588995, -2.617434, -1.426156, -1.792942, -0.477611, -0.760941, 2.425961, 1.197546, 4.233758, -2.818418, -0.567543, 1.363195, 0.341048, -2.289856, 0.309621, -0.637264, 0.347269, -0.461381, 2.523387, -1.344993, 0.216397, -10.708355, 0.919107, -2.543254, 1.904163, -0.058510, 2.088098, 0.810954, -0.385477, -1.210636, 0.094202, 0.535237, -1.502995, 0.344693, -1.197936, -1.386554, -0.128568, 1.160851, 2.858701, 1.907802, 1.539349, 2.433446, 2.448377, 2.303145, 4.565366, 2.033096, -2.155319, -0.801175, 0.520414, 1.972140, -2.310103, 0.173755, 0.974195, -1.724003, -1.241399, -2.017481, -2.318946, 0.835495, -1.525946, 0.359016, -2.165900, 0.091703, 4.673479, 1.606368, 0.459343, -0.392127, -1.453846, -0.353931, -2.310848, 1.236771, -0.827690, -1.652173, 8.877085, 2.143196, -1.679855, 0.654059, 2.409938, 0.361839, -1.841513, 0.169567, -0.140878, -0.171590, 1.312745, -0.134062, 0.724259, 2.047288, -0.841137, 1.260102, 1.002513, -0.647772, -5.369482, 5.233755, 0.599639, 1.172586, -1.073408, 0.885488, -1.003686, -3.339315, -0.038161, 0.559816, -0.626176, -1.193738, 8.828257, -1.190498, 0.449845, 1.863093, 0.556650, 1.450109, -0.126581, 0.105580, 0.397996, 0.313131, 0.914442, 0.404302, -3.006561, -1.363588, -0.628297, 0.217007, -2.647120, 2.862575, -8.684146, -0.418185, 1.437103, 0.828107, 0.458347, -0.435039, 1.646789, 0.926643, 2.910391, 0.709955, 0.771045, 0.037429, -4.088374, 0.162264, -3.077456, 1.285309, 1.316735, 1.387373, -1.066935, -0.645049, 1.368517, -1.300493, 1.187293, -0.031566, -0.225020, 0.646678, -0.172955, 3.641774, 0.928097, -1.952956, 0.098479, 0.563732, 2.789780, 1.441335, 2.300636, 1.045073, -1.739837, -0.522630, -0.936236, -0.113540, -0.295716, 0.952094, -12.808439, 0.464352, 1.135275, -1.872086, 0.042147, -3.117124, -0.354251, 2.127711, 0.385427, 1.285458, 0.280779, 1.432706, 3.229187, -0.091171, -2.339383, 2.241873, 0.316786, -2.255201, -0.556684, 2.200058, -0.417242, -1.281321, -0.334675, -0.578597, -1.088588, 2.267492, -12.715529, -0.348637, 1.471534, -1.676870, 1.577533, 0.676559, 0.080321, -1.086842, 0.430531, 0.437493, -2.491856, -1.025787, -5.674278, 0.392208, -4.118344, 1.785830, 3.346591, 0.304010, -2.162539, -1.001513, 0.018220, -2.174213, 0.871597, -0.414440, 0.716925, 0.457252, -1.066866, -1.192133, -1.134685, -0.413272, -0.989214, -0.402608, 0.400279, 2.370824, 0.565737, -1.827672, 0.184565, 1.494704, 1.196620, 1.353731, -1.431783, -0.185922, -0.168908, -0.945395, -1.528083, 0.495618, 1.339592, -0.752455, -0.017693, -1.226821, -1.587509, -1.819557, 0.250778, -0.129144, -0.049336, -0.556312, 1.388641, 5.209940, 7.888215, 2.105105, -4.130414, -1.113280, 0.696917, -1.569371, 1.314536, -1.086163, 1.195435, -0.062427, 1.109358, 2.636901, 0.217930, 0.296666, -1.974165, -1.735777, -2.618426, 1.400363, -0.096162, 0.028053, 2.971062, 0.302502, -1.890169, 0.857939, -2.612601, 1.933837, -2.467517, 1.603533, 1.537552, 0.085175, -2.052856, 0.524604, 0.883950, 0.739990, 1.870276, 1.625467, -4.919993, 1.174270, -2.426991, 1.683943, 2.972154, -1.803723, 1.008075, 0.248989, -0.385458, -0.196188, 1.270961, -0.229944, -3.168777, 1.614013, 1.595189, -0.410707, -1.044544, 0.778994, 0.653459, 0.350870, 4.649518, -1.140979, 0.838352, 1.230573, 2.527621, 1.062342, -0.298138, 0.888464, 0.313968, 1.429981, -1.374527, -0.592750, -1.111212, 0.250075, -1.495723, -0.555394, 1.749684, 1.169427, 0.184614, 0.222946, -2.256650, -1.231119, 1.744124, 2.778740, -0.821602, 0.101260, 1.169010, 0.798608, 
0.031937, 0.170795, -7.833639, -0.522136, -0.191751, -0.644957, 1.360851, -0.476764, 0.153982, 1.444253, -0.697542, -1.839324, 1.011768, -1.918130, 1.279057, -0.911504, 1.704320, 1.411318, 0.101799, -1.732975, 1.152074, 1.164443, -0.210665, -0.760696, 0.550097, 0.398409, 1.169000, 4.403631, 0.044010, 0.429579, 0.020382, 1.313668, 0.072535, -0.443568, -0.970095, 0.235293, 3.075127, -0.917624, -0.191897, 0.649122, 0.604821, -2.162972, 1.809951, -0.566550, -0.368535, 0.561067, -1.963957, 0.591505, -1.989305, 2.010246, 1.025328, 1.118531, 0.765658, 1.132761, 0.590744, 0.309546, -0.072684, 0.927601, 0.402006, -0.279634, 3.336041, 0.363340, -0.084176, -0.376793, -0.792103, -0.910649, 0.842618, 0.704083, 1.336435, 0.633091, 1.232460, -0.126867, 1.860313, -0.582771, 2.027243, -0.734689, 0.396841, -4.301981, -2.570962, -2.346615, -0.294627, -9.963593, -0.950176, -0.416823, 0.185868, -0.575314, -0.248928, 0.115231, -0.267261, -0.893238, -0.038151, -1.312479, -0.859603, -0.981597, 0.905031, -1.797287, 1.575514, -0.293734, -2.374883, -2.280471, -0.114908, -8.689425, -3.053759, -0.254290, -1.284394, -2.571275, -0.515252, 1.703866, 0.979483, -1.641806, 0.677989, 0.632244, -0.935906, -0.334838, -0.395663, -1.000913, -1.619586, -0.240378, -0.943641, 2.728803, -0.003069, 1.764210, -1.329910, 2.901473, -0.663974, -2.739002, -0.557714, -1.467272, 0.430610, -2.140990, -1.018700, -8.117669, 2.187021, 1.585709, 0.303729, 2.666512, -0.322125, -0.510872, -2.895965, 0.332371, -0.563060, -4.857492, -0.213120, 0.974745, -0.822440, 1.557947, -4.240340, -2.603575, -0.108165, -1.109751, 2.159279, -3.169286, -0.512767, -0.863623, -0.091780, -1.966987, 2.861250, 0.022738, 1.359011, 0.319737, 1.852149, -0.047252, -0.426285, -0.759734, 1.233245, 1.222208, -0.428217, -1.377964, -0.014197, -1.666247, -0.693576, -2.364216, -2.303179, -1.875806, -2.193077, -2.161515, 0.730956, 0.106795, -0.440026, 0.882222, 1.271376, 0.223913, 0.026457, 0.875005, -3.332191, 0.711084, 1.622856, -1.125061, -0.091555, 0.591242, -1.053302, -0.374932, -0.451309, 0.116873, 0.916229, -1.526904, 1.396491, -4.383001, 0.277899, -0.744718, 3.206128, -0.688903, -1.531305, 3.416818, 2.721693, 2.085379, 3.218796, -6.358753, 1.035356, -2.443428, -1.629758, -0.956840, -2.862966, 1.097265, -0.132868, -1.223482, -0.695732, 0.078407, -1.925692, -1.591343, -0.446446, -0.413490, -3.946516, 0.004555, -0.718744, 0.671976, 0.724541, 0.277439, -1.528037, 0.149476, 1.957371, 0.107017, 1.056521, -0.852286, -1.079987, 1.865555, 1.076413, 0.002297, 2.019732, -3.465175, -1.121418, -0.474052, -1.834379, 0.894442, 0.671292, 1.058087, -0.456843, 1.975728, 1.927830, 0.147125, -1.154134, 0.253917, 3.004951, 0.158338, 0.203460, 0.986412, 0.876946, 3.303209, 0.924066, 2.434523, 1.368330, -1.493254, 0.657588, -2.332864, -0.716405, -1.623935, 0.734781, 2.475377, -3.862651, 0.639123, -1.024320, -2.099458, -1.505555, -2.106091, 0.105197, -1.276832, -1.604650, 11.007366, 16.257780, 1.212517, 0.889604, 3.437125, -1.665995, -0.542497, -0.793582, 0.072068, 0.891223, -1.907271, 1.315802, 0.976274, -2.103378, 0.598498, 1.317193, 2.073358, -5.785201, 0.120337, -0.128886, -0.625547, 1.823134, -0.627097, -0.074800, 0.489213, 0.000787, -0.695088, -0.151693, -1.164222, -0.719642, -4.420443, -1.641346, 0.223747, -4.239794, -2.342813, -0.701424, -0.777337, -0.055989, 2.989524, 1.142629, -1.108115, 1.921261, 2.104734, 0.713200, -1.836161, -0.769459, -0.096665, -1.405661, 1.133891, 0.677240, -0.079669, -0.786968, 0.757151, -1.115593, 1.729968, 3.429117, 3.333342, 0.889537, 1.064027, 2.452963, 
0.146189, 1.487560, 0.159354, -1.003925, 3.638438, -1.171568, 0.858463, 1.559678, -1.309983, 0.294997, -1.911492, -0.658933, -3.117077, -1.047560, 0.714687, -2.235785, -0.924417, 1.562038, 1.057431, -0.458399, -0.513814, -0.561772, -1.739037, 1.467458, 0.620015, -0.726114, 0.533329, -0.823658, 0.602171, -4.322502, 0.547450, -0.053952, 5.291243, 0.374018, 0.543503, 0.658229, -1.971978, -0.861369, 0.358919, 0.152244, 0.556775, -2.041901, 0.755251, -0.084016, 2.247265, -1.645395, -0.975744, -0.170778, 2.687211, 0.454293, -0.637791, -0.689573, 2.783953, -0.462319, 1.623681, -0.080127, -2.483746, 0.395303, 2.702187, 0.863832, -0.829897, 1.051659, -0.179475, 1.012557, -3.725295, -2.425763, 1.506878, 0.261926, -0.952743, 1.053141, -0.772923, -0.683897, 3.952535, 2.515576, -1.145893, -1.876673, 1.160748, 0.543679, 0.466676, -0.385127, 1.240535, -0.393820, -2.291663, 0.888950, -1.278874, 0.038750, -0.752375, 2.060377, -0.336541, -1.480818, -1.426695, -1.427118, 2.019039, -2.795073, 4.602587, -0.728483, 0.066261, 0.140672, 0.995434, 6.288233, -0.336479, -1.012862, -0.521627, 2.346851, 1.099883, -1.747922, 0.289940, -2.412572, -0.726209, 1.113508, 1.037497, -0.935774, 1.114324, 1.137784, -0.378494, -0.369090, -0.495506, 0.839105, -4.885697, 2.921585, -0.534774, 2.706858, -2.904579, -3.026774, 1.074224, 0.988017, -1.126216, -2.458224, -0.514051, 0.257682, 1.672144, -3.153304, -0.512526, -1.063563, 0.611227, -0.169314, 0.288846, 0.492560, 0.557903, 1.448651, 1.216152, 1.514556, 0.348303, -1.505949, -0.923314, 3.453034, 1.904600, -0.675914, -0.474720, -0.985341, -0.153247, 1.354074, 14.742016, 0.398167, 2.527965, 1.902573, 0.569317, 0.713961, -0.157248, 0.583093, 1.446733, -0.671570, 3.754446, -1.230091, 2.326427, -1.630548, 1.877837, 0.963890, 2.939709, -0.352764, -2.149770, 0.465506, -0.063179, -1.710148, 0.230100, -0.749501, -1.599947, 0.353103, -1.376222, -3.326641, -0.228345, 0.709135, 0.279200, -0.913855, -1.260106, -1.632902, -0.528416, 0.245233, 1.025672, -1.362333, -1.633941, -0.171049, 0.478239, 2.395704, 1.798306, 0.518881, 2.364272, -0.769217, 2.540560, 1.193590, 2.762551, 1.171583, -0.288009, 3.315905, 0.178900, -0.414532, 1.629692, 3.644429, 0.047435, -0.409813, 1.277344, 1.235892, -0.370595, -1.994537, -1.594649, -2.062027, 4.117460, -1.693915, 0.134951, -0.276196, 1.733694, -0.138643, 0.993175, 0.659948, -1.959156, -0.568687, -0.549599, 1.432800, -1.232484, -3.082007, -0.631581, -2.420118, 2.023217, 1.324583, 0.041360, 3.613342, -4.568541, 0.285068, 0.292985, -0.218380, 3.284291, 1.405751, -0.301350, -1.727242, -1.090432, 2.118229, 0.914769, 0.380845, 0.296591, 0.511121, -0.837049, -0.388810, -0.239591, -4.444277, 4.205221, -0.768113, -0.578623, -1.128701, 1.552697, 0.367937, -0.099581, -1.281450, -0.908604, -0.632442, 13.450798, -2.382877, -3.409620, -1.395153, -0.904329, -0.477906, 2.242418, 1.626651, 1.743318, 0.201355, -1.874433, 1.069358, -1.122438, -0.513217, 0.798678, -2.774002, -3.890982, 1.194817, 0.417884, -2.555126, -1.121142, -1.795019, -0.905862, 0.113841, 0.568253, 0.533732, 0.223853, -0.951292, 0.625884, -0.039080, 0.570316, 1.236212, 0.025467, -1.700714, 5.611750, 1.285293, 0.344259, 0.667707, -0.425748, 0.564495, -1.901882, 1.431199, -1.215952, 1.253814, -0.354065, 0.312732, -3.331754, -1.358605, -3.347685, -1.173771, 2.645103, -2.667670, -1.189343, 0.628767, 0.290056, -1.889051, 1.207483, -0.013234, -1.082175, -0.463480, 1.240325, -0.788110, 3.318880, 1.457310, -1.047825, 0.244308, -1.829554, 3.589651, -0.124967, 2.177041, -1.269532, 0.687182, 0.477322, -0.286502, 
2.742045, -1.479311, -0.366762, 0.130173, -3.005942, -4.997714, -1.074178, -1.553302, 2.917926, 0.381329, -1.468071, -1.214693, 0.776294, 0.101943, 2.781591, -0.340745, -0.709129, 1.041831, 0.696277, 0.774845, 2.691426, 0.668740, -0.311207, 1.233782, -1.170120, -0.034383, -1.132921, -1.765879, 0.484065, 0.475109, 0.550853, 0.381090, 0.574175, 0.871582, 2.628418, 1.365183, 1.224013, -3.376759, 1.086175, -0.021419, -2.019457, 0.234232, -3.502301, 1.270517, 4.130678, 1.725621, -2.442235, -10.042397, -0.838784, 0.022949, 2.967072, 0.118950, 2.288779, 1.842968, 2.072146, -0.317232, -1.301560, 0.340098, 0.647872, -0.179999, 0.620119, -0.987134, 1.012957, -0.834723, 0.834885, 0.751951, 1.262253, -1.385338, -1.551879, -1.337877, -1.756028, 0.510630, -2.394951, -0.205486, 1.575394, -0.480835, -0.409149, -0.437463, -2.282324, -1.881278, 1.116361, 1.136089, 24.378939, -0.393524, -3.075753, 1.290074, 1.087091, -2.154333, 0.970394, -1.926258, 1.618542, 0.412278, -1.739195, 1.623174, 1.373538, -0.979996, 0.275235, 0.773335, -0.732338, -2.702239, 0.798534, -1.287799, 35.395630, -0.367900, -1.554014, -3.089915, -0.288484, -2.492246, 1.115393, -1.127128, -3.936405, -0.125779, 0.485530, 0.389982, -1.279955, -1.730957, -0.593967, 0.948370, -2.064061, -0.272153, 1.658313, -1.792256, 1.410611, 1.607257, -0.524023, -1.193940, -0.477432, -0.203510, -5.396678, 0.878312, -0.600852, -0.958319, -1.770715, 2.374568, -0.837991, 0.792972, 1.333605, -2.388574, -0.201241, 0.498677, 0.179003, -1.049678, -1.267674, 0.086085, 1.527040, -0.410888, 0.113187, -0.560847, 0.438254, -0.419042, -8.495057, 1.416575, 0.312171, -4.181562, -3.007250, 0.894191, 0.282944, -0.465586, -1.058151, -0.204914, -0.380061, -2.493015, -0.319261, -2.098388, -2.529561, -2.387559, 1.457654, 2.064481, -0.206003, 0.331642, -0.909741, -0.725373, -1.512754, 0.902448, 1.214116, 0.187296, 0.355552, -1.346207, 3.032812, -0.008650, 0.665337, 0.523351, 2.378387, -3.201352, 1.975095, -0.343192, -0.887129, 0.839616, 2.507690, 0.034270, -0.425381, 0.244614, -1.063135, -0.416553, 0.151447, 0.544476, -0.904222, -1.981529, 0.008429, -1.530195, -0.380718, 2.639323, -1.974096, 2.537336, 0.128485, 0.057991, -1.839553, -2.247260, 0.850893, -0.609513, -0.073828, -6.912158, -0.139270, 0.179913, -1.241016, 0.320185, 1.688670, 2.298871, -0.673911, 1.046304, -2.560003, 2.457667, -1.773535, -0.873110, 1.303595, -1.350625, -2.650466, 1.821359, -0.091552, -1.048395, 4.371029, -1.531572, -3.530663, -7.938312, 0.371839, -3.025643, -1.249872, 2.221552, 3.361130, -0.400779, 1.653993, 1.089241, -0.370755, 1.477701, -0.303671, -0.078472, 1.358668, -0.792539, -1.049296, 0.496935, 0.733483, 1.849233, -1.298069, 1.658483, 1.070524, 1.064211, 2.341386, 2.022804, -0.712600, -0.828958, -4.590330, -2.193731, -1.187169, -0.390252, -1.434384, 0.475188, 0.472390, 1.016687, 0.816242, -2.084652, -0.264609, 2.821970, 0.642034, 0.144703, -0.085207, 3.882366, -0.335585, 2.945193, -8.780211, -1.788703, -0.816980, -0.324785, -0.251749, 0.550231, 1.536315, -0.976531, -0.001636, 0.025487, 2.723643, -1.050987, 1.859178, 1.747509, 0.632397, 0.674326, 1.942476, 0.170618, 0.628320, -2.931181, 1.509731, 0.052616, 0.396503, 2.053801, 0.786840, -1.765508, -2.049108, 1.391213, -1.846742, 0.302443, -0.235106, -4.113284, 2.388442, -4.504088, 1.407906, -0.792122, 0.238252, 0.770481, -2.253059, -0.076944, -2.216919, -1.822923, 1.290645, 1.323500, -1.547410, 1.743029, -1.069493, -0.496441, 0.411108, 0.256616, 1.004339, -0.964894, 1.061500, -0.336986, 0.609277, 1.396947, 0.389251, -0.800786, 2.296143, -0.148887, 
1.665507, -1.662055, -0.582016, -2.904270, -1.138959, -1.115410, 0.504910, -0.440490, -1.917381, 1.905744, 2.691540, 0.668207, -1.137727, 1.715427, 2.627678, -0.848056, -2.206196, -0.485603, -0.104143, -1.581868, -3.105368, -0.136548, -3.890651, 2.893072, 1.311447, 1.207675, -0.110239, -0.015520, 3.073158, 0.722297, -0.317380, -1.639348, 2.564957, 0.707945, -2.089616, -1.077968, -1.993640, -0.184228, 2.603096, 0.495441, -0.690588, -1.996336, -0.615599, 1.252049, -1.305657, 2.258763, -0.367589, 0.744874, -3.983390, 0.641868, 0.682344, 1.534273, -0.078887, -0.638862, -0.436887, 1.282581, -0.474767, 0.933970, -0.562142, -1.196221, 0.620703, -1.794334, -0.201897, -0.273618, 0.759434, -4.578209, 1.045878, -0.396665, -1.393743, -0.258993, 0.909860, 0.065080, 2.534648, -0.621033, -2.463706, -1.267582, -0.949879, 0.684556, -3.153200, -1.322336, -0.846883, 0.756270, -1.246623, 1.325826, 0.269144, 0.655697, 1.508641, 0.955546, -4.049324, 1.236385, -2.208986, -2.164684, 0.483166, -1.556875, -2.399807, -0.071894, -0.598155, -1.504630, 0.909837, 0.441980, -0.142267, 3.186562, -9.105115, -0.706504, -0.904092, -1.254219, 1.707472, -0.484811, 0.243699, -1.906812, 0.557756, 0.399335, -0.668094, 1.150291, -33.314419, -4.579463, -0.827204, -2.219878, 0.864965, 0.843359, -3.120395, -3.499207, -1.056821, 2.018512, 1.058076, -2.007281, -1.928121, -3.263878, -0.079084, -1.419108, 0.705255, -2.721349, -0.210468, 1.977742, 0.119329, -3.372694, -1.065624, 0.044773, 0.949414, 1.982919, -2.992223, 0.043504, 0.458767, 2.387087, 1.143641, 2.626701, 1.635916, 1.130192, -1.146586, 2.295970, -0.108903, 1.978274, 0.249793, -1.822635, 1.072817, -1.037818, -1.654801, 0.506986, 0.223898, -0.801533, 0.471886, 0.821947, 0.165247, -1.029804, 1.522923, 1.681942, -34.283497, -1.443581, 3.120758, 0.109961, 1.252267, -1.838808, 4.105927, 1.430098, 0.795596, 2.285416, -0.350729, -0.183396, 2.008162, 2.535472, 1.442522, 0.067094, 1.726215, -0.465237, 0.228092, -0.898866, -0.351523, 0.943222, 2.762748, -0.150182, 0.072098, -3.666024, 0.861893, 1.339004, -4.193988, 0.934029, 1.222872, 1.534581, 0.536860, 0.175307, -4.649261, -0.155842, 0.486639, -2.415943, 4.921558, 0.074272, -0.523580, 9.426488, 1.450721, -0.273441, -0.685305, -0.208207, -0.141061, -0.689686, -0.430765, 1.203310, 0.457785, -0.636253, 1.884096, 0.180858, -0.601352, 1.059752, 1.353793, -1.144091, 1.012244, 0.111028, 1.282956, 0.539753, -2.136105, -0.362957, 1.343894, 1.246425, 0.270625, 0.846098, -1.814227, 0.723642, 0.896651, -0.492568, -0.483125, 1.335945, -1.528607, -12.522959, -2.514193, -0.385231, -0.627579, 0.507236, 1.104174, 0.487314, -0.776387, -0.105157, 2.839599, -0.512341, -1.799638, -1.330242, -0.425150, 0.598084, -9.454448, -0.575648, -1.355067, -1.314143, -3.041453, 0.646404, -0.381917, -0.618457, -0.122956, 1.836147, 0.686982, 1.346205, -0.397530, 0.177667, -0.312596, 0.959667, 0.490264, -0.229002, 2.652546, -0.523528, -0.295107, -0.722657, -0.770971, 0.501242, 2.509320, 0.498323, -2.765550, -0.476504, 2.484814, 1.173968, 0.066951, -0.033656, 1.939883, 0.595368, -0.492711, -0.485204, 0.524125, 2.012069, 1.902331, 1.441705, -1.873747, 8.485007, 0.809767, 2.354154, -1.817922, -0.857522, -1.486599, 0.853559, -3.121060, 1.469213, -0.247292, -0.377132, -1.203432, 1.732122, -0.968094, -2.430906, -0.646179, -0.096946, -1.891861, -1.449054, -1.289801, -0.931208, 3.503360, 0.109478, -2.145627, 1.445237, 0.036277, 0.486036, -1.857045, -0.748751, 0.455339, 0.367962, -2.402283, -0.665134, 2.883188, 0.057114, -2.703108, -1.443505, 0.250365, -0.242302, 3.324877, 
0.508369, -0.850818, -1.103852, 0.660547, 3.891412, -0.431533, 0.720094, 0.351299, -1.011191, 2.850002, -2.083087, -1.732875, -0.878147, 0.754441, 1.108804, -1.122806, 0.565206, 0.396117, 3.356467, 0.028610, -1.452879, -1.373615, -0.454277, 0.141389, 3.266813, 12.898273, 1.677444, 0.586111, 1.705890, 1.566172, -2.538870, -2.231463, -1.102204, -2.527648, -1.085532, 0.478172, 0.187650, -6.529508, 0.846004, 2.000021, 0.900888, 1.543347, 1.928105, 0.956930, 1.169501, 2.499456, 0.512169, 1.320805, 0.813092, -0.862943, -0.914188, 1.019531, -1.543529, -3.052305, -0.000092, 0.458211, 1.184352, -0.189562, -0.504885, 0.651876, -2.231333, 0.150909, 2.522482, 1.546768, -1.469670, 0.283542, -0.038828, -0.221745, -0.613487, 0.986294, -2.087503, 1.868805, -0.862680, -2.028013, -1.342783, 2.996248, -0.964869, -0.889650, -1.940610, -0.491476, 4.659435, -0.629713, 0.880112, 0.013399, 1.543464, 1.811594, -1.919105, 0.579925, -0.054963, -0.611301, 1.213541, 0.044412, 2.428571, 1.650359, -0.011562, -3.940373, 0.219158, -0.250527, 2.196884, 2.333903, 2.026985, -2.534095, -1.620452, 1.754802, -2.421583, 0.130233, 1.375082, 1.599321, 1.592287, 1.072141, -1.522067, 0.396378, -1.969304, 1.184472, 1.064752, -0.051353, -0.429749, 3.536501, 1.289551, -1.872306, 0.962450, 0.012061, -0.633179, -5.554706, 3.344516, -1.520184, -0.216105, -1.681684, 4.008332, 2.174610, -0.961140, -0.686181, -1.402003, -0.401292, -0.713743, 3.171560, -0.512009, 0.219556, 0.559843, -0.798941, -2.584601, 1.708179, -1.446687, 3.116506, 0.701865, 0.474630, 0.662607, -0.341657, 3.770340, -1.970125, 0.079163, -0.033681, 2.390205, -0.802647, -0.419331, -0.633927, 0.158110, -1.025442, 0.967359, -0.777828, 0.594151, -0.833795, -0.444425, -1.413129, -0.685053, -0.072876, -0.182840, 0.716580, 2.238798, -0.586169, 1.660746, -1.904589, 4.398918, -1.573961, 0.223843, -2.490864, 1.426156, -1.309565, 0.297844, -0.122553, -4.125076, -1.175787, -0.808162, 3.899071, -3.727594, -1.969113, -1.416097, 0.433778, 0.566248, 2.189430, 13.170939, -0.437235, 1.563687, -1.015102, -10.265989, 0.488794, -1.776708, 0.143081, -0.918150, -1.271382, 4.251751, 2.227284, -0.468386, -0.818391, -1.076629, 0.803174, -6.778745, -2.332751, 3.314303, -0.854069, 2.525487, -0.036530, 0.712685, -2.247428, 1.711706, -0.839512, 2.300299, -0.371730, -0.143227, -1.000555, 2.392848, 3.181103, 1.384507, 0.174168, 0.332910, -0.797669, -1.976347, -0.308509, -1.638045, -0.427884, -1.578577, 1.571453, -1.381518, -0.068900, -0.008149, -0.877026, 0.584573, 0.371052, -0.273352, -0.496754, 0.173186, 1.495046, -2.134991, -1.174339, -2.233019, -1.859445, -0.982453, 1.005392, -3.032267, 1.807362, -1.046106, 1.126163, 1.836564, 1.041053, -0.354948, -2.419614, -1.992808, 1.194339, -2.727120, -2.960786, 1.990473, -0.249863, -2.641395, 2.517450, 1.227859, -0.836136, -0.104000, 1.062187, -1.290700, 1.327066, -0.019623, 5.412015, 0.280590, 2.486149, 0.634194, 0.961871, -1.225696, 2.310711, 0.309103, -1.714766, -0.548396, -0.683925, -3.823319, 1.304894, 1.286261, 0.745414, 3.101287, 0.891395, 1.379889, 1.123019, 1.053005, 2.294336, 1.014520, -2.056264, 0.847806, 1.838467, 0.661211, 0.911672, 2.108672, -1.296724, -0.994294, 2.204733, 0.060958, -3.965935, -0.375877, 4.837624, 1.608993, 0.728340, 0.086287, -0.533132, 0.689582, -0.234022, -0.732648, 1.564658, 1.145785, 1.684504, -0.951721, -2.480582, 0.639165, -0.163595, 0.932444, -0.321780, -1.511633, -0.700274, -2.130924, -0.589078, 2.558939, -0.031520, -0.466372, -0.661476, -0.310624, 0.919263, 2.138045, -2.980811, -1.367038, 1.232296, 0.824462, -1.153319, 
-1.915287, -0.933534, 0.126949, -0.378158, 0.226273, -0.199483, 4.345228, 1.491282, -15.042450, -0.682710, 0.315321, 0.360537, 0.098894, -1.003842, -1.970495, 0.370964, 0.917547, -2.802694, -0.426734, -0.872954, -0.628911, 0.498273, -1.146430, -0.303363, 0.423223, -1.333830, 3.313324, -0.368433, -0.129785, -1.392973, 0.117655, 0.907470, 0.862268, 30.468317, 1.916252, 0.630351, -2.087536, 2.504394, 1.090119, -1.520968, -0.478656, -0.055788, -1.631508, -0.471805, 0.263214, 1.737623, -2.067608, 0.341913, -0.636885, -1.703791, -0.276518, -1.013657, 0.735331, 0.755858, -7.211546, -2.701548, -1.511194, 0.885756, 1.038019, -0.747457, -1.262162, -2.229699, 0.619854, -0.506964, -2.198322, 1.772692, 0.917558, -0.345676, 0.773990, 0.077003, 0.222814, 0.570620, 13.318283, 5.154265, -0.133818, 1.382821, -3.381096, -1.348515, 1.883829, 1.128500, 1.431208, -2.484697, 2.832257, 2.581187, -0.643860, -0.449537, 2.258231, -0.865092, -2.880285, -4.159235, 0.895552, -0.865830, 0.576851, 0.660565, -0.503361, 2.477618, 0.888527, 0.642267, -0.203590, -0.203652, 0.518252, -0.481242, -1.399775, 0.071838, -1.687822, 1.408637, -1.714611, -1.262840, 0.550977, 1.361458, 1.093706, 0.218738, 1.694335, -0.816886, -1.712049, -1.776350, 0.478118, 0.873915, 0.157106, -6.387165, 0.410236, 2.402042, -1.496522, 0.605822, -1.822789, -1.357874, 0.902568, 0.476761, -0.997130, -0.898986, -0.416213, 0.628011, -0.191016, -2.507825, 2.000534, -1.100833, 1.287321, -0.746681, 0.251129, -0.452919, -2.379659, 0.467141, 2.813297, 3.019051, 0.894045, 0.513003, 0.247807, 2.164137, -0.861424, -1.187832, 1.431251, -1.880267, -2.707241, 0.882622, -1.083287, -2.503087, -0.331748, -0.253308, 1.218072, 0.465451, 2.239259, 0.033480, -0.988318, -1.066254, 1.289019, 1.244425, 1.395825, -2.000483, 8.393459, 2.063885, 0.351303, 1.736136, 1.678012, 0.466673, -0.735065, 2.319815, 0.587236, -0.219373, 4.404061, 0.069035, -0.615926, -1.054781, -0.281119, -2.315298, -0.598869, -1.699938, 0.399952, 1.414117, 3.159378, 0.971840, 1.468741, 0.738671, -1.217917, -0.005656, -1.837169, -1.033584, 1.689942, 0.296139, -0.159082, -0.282257, 1.146659, 0.589294, 1.239523, 0.584590, 0.732567, 1.415604, 0.229542, -1.803726, 1.402160, -2.028142, 1.227429, -1.287164, 0.215315, 0.091171, -0.837675, -1.208822, -0.082121, -0.389508, -0.755331, -1.751180, -0.248557, -1.661491, 0.068512, 2.337994, 1.096209, 2.914422, 1.787869, 3.141023, -3.783829, 1.704273, 1.094445, 0.711651, -0.020473, -0.531446, -0.418532, -0.951771, -1.334428, 3.076059, -8.316907, 0.220659, -0.314838, -1.326944, 0.409516, 8.980949, 0.336135, -0.271100, 0.718912, -0.735141, 2.867171, 0.454470, -1.684832, -0.667039, 30.702013, 0.883889, 0.531766, -0.452834, -1.168136, -1.349033, 1.579897, -9.245929, 3.323505, 0.975022, 1.259668, 1.623177, 1.048897, 0.252429, 0.494380, -1.923661, 0.298914, -1.570870, -2.745278, -1.792378, 0.748294, 5.594098, -1.439187, -1.536032, 2.820908, -0.333814, -0.420440, 2.032806, -1.904369, -0.007114, -0.871720, 0.579277, 0.640428, -0.765459, 0.173253, 0.786739, -0.283481, 0.956241, -1.522335, -3.105874, -2.173901, -0.282985, -1.197513, -2.059102, -1.084585, -0.648405, -0.228381, -0.374229, 0.766084, -2.314189, 0.978999, -0.764588, -0.302045, -1.880411, -0.740701, 1.435884, 0.545858, 1.537653, 1.470700, 0.422056, 0.317391, 2.463825, 0.103666, 1.313960, 1.550173, 1.734758, 2.650702, 0.980899, -4.777202, -0.605416, -1.772424, -1.299403, 1.382249, -2.957427, 1.906892, -0.161418, 0.556454, 1.740305, 0.006908, 6.763729, -0.278677, -1.766817, 0.121939, 1.601009, -0.562171, 1.424215, 
0.542079, -0.740449, 0.396567, -1.856834, -1.074328, -1.226289, -0.696100, 0.464042, 1.506954, 0.080288, -2.100569, 0.921613, 0.558173, 2.382897, -2.477536, -1.884583, -0.054904, -0.519191, -0.969873, 0.959781, 0.231381, 1.963495, -0.388346, 0.184281, -0.207316, -1.240958, 0.680580, -0.335134, -0.166712, 2.305028, -0.148513, 5.842828, 1.085152, 0.373194, -1.860990, 0.415133, -1.615274, -2.057973, 1.233435, 0.705104, -0.175169, 0.365537, -3.113515, -0.644583, -1.924930, 0.268149, -3.299269, -0.044249, 0.769384, 0.747958, 0.502507, 1.253872, 2.567127, -1.534801, -6.195432, 0.239830, 1.061098, 1.498278, -1.147298, -0.012934, 1.279267, 0.256549, 0.932009, 2.801676, 2.601096, -0.057946, 0.010808, 0.225650, -0.888552, -1.055726, 2.577467, 0.214108, -2.550973, -2.023343, 0.168402, 1.144072, 1.188154, 0.007975, -1.752963, -0.353960, -1.058180, 1.402650, 2.647588, 1.578010, -0.623806, -5.160738, 0.940677, -2.087744, 2.504496, 2.207713, 0.738647, -3.554515, 0.701976, -1.332600, -0.665026, -0.837372, 0.036542, 0.498252, -0.463899, -1.281477, 0.037951, -2.718753, -2.519139, 0.797102, -0.969279, -0.007088, 1.170141, -0.555038, 0.601634, -0.725569, 1.991015, 1.571092, 1.635221, -0.142499, -0.239051, -0.623268, 1.279354, 1.173028, -0.863181, 0.221621, 50.890709, 0.459216, -0.634014, -1.232200, 2.451216, 0.997746, -0.070813, 4.072617, -1.105848, -0.707585, 0.129221, -1.666970, -2.221236, -2.830456, 0.259465, 0.248408, 0.067695, -3.389023, 2.130977, 2.272633, -2.333500, -2.583345, -0.533235, -0.838626, 0.675928, 2.638815, 3.564664, -0.836662, -0.464054, 1.113580, -0.739706, -1.093256, -0.590220, 1.229130, -2.069568, -1.136705, 1.287993, 1.863708, -1.156420, -0.803547, 1.157883, 0.605184, 1.073040, 0.025076, -1.252429, 1.357709, -0.065110, -0.653028, 0.069790, 4.635374, -0.414350, 1.573298, -0.352398, 0.538733, 1.097928, 1.345788, -1.038578, -0.814868, -0.190454, -1.798921, -1.403554, 3.546619, -1.749692, 0.060874, 0.027193, 0.516291, -0.682459, -0.085446, -0.505916, -1.123992, -0.330597, 0.062728, -0.334435, 0.210847, 1.488047, -1.092073, 1.356015, 3.290716, -3.166292, -1.558093, -0.793964, 3.183681, 3.087046, 8.326931, -2.114156, -1.133654, -0.425647, -1.151897],
+	"llama3.2-vision:latest": [-3.078805, -1.697646, 4.600920, 2.136139, -1.034632, -0.008391, -0.913782, 2.059704, 1.261938, 0.314856, -0.313179, -2.066850, -0.737111, 2.565375, -1.834170, 3.589492, -2.283495, 1.541524, -2.779084, 1.555042, -0.403128, -0.313654, 1.059721, 2.011516, -5.750325, -0.567921, 2.883536, -0.803768, -2.809469, -2.432648, 0.492391, -1.709821, 0.459170, -0.239044, 8.034880, 2.616068, -0.866570, -0.313675, -0.625120, 0.185346, 2.798899, -4.163560, 1.685675, 0.213714, 3.102599, 2.594100, 1.551703, 0.171097, 1.063220, -1.646748, -0.573196, 2.495553, -0.831499, -1.483251, 2.482893, -0.726516, 0.400158, -0.145284, -3.525773, -2.577699, -2.111852, 2.612575, -3.947710, 1.561014, 0.210879, 1.777725, -0.181272, 2.283506, 1.242015, -0.025549, 0.957746, -0.246194, -0.365402, -0.971253, 4.563991, 1.184461, -0.725158, 0.744156, 0.815184, -1.637800, -4.056623, 4.689488, 0.249045, 5.708639, -3.208293, -2.040814, 0.996086, -1.227924, 2.843411, 0.019621, -0.695887, 2.414440, -0.218883, 0.861006, 3.528445, -1.916659, -0.619786, 3.501029, -0.093414, -3.492441, -1.942818, 0.583332, -0.770895, 3.453049, 0.567662, -1.010252, 2.237015, 1.537082, -2.533475, 1.944466, -0.632733, -0.084942, -0.505062, -0.605591, -1.142169, 2.620798, -1.018153, 0.334478, -1.901000, 0.225330, 0.280273, -0.487890, -1.552703, -0.719227, -3.150110, 1.320974, 3.735471, 1.304873, 5.072580, -2.039533, -3.994229, 0.517429, 1.133909, -1.422137, -1.049232, 0.620438, 1.793169, -0.807552, 0.183158, -0.329035, -0.641384, -1.251977, -1.347493, 0.622846, -0.984846, 1.988598, 0.296134, -2.342518, 0.632345, 0.030586, 0.768713, 0.437475, -2.269910, -2.521681, 1.579402, -2.377503, -2.105729, 1.594941, -2.283249, -2.322279, -0.602992, -4.794743, -2.325619, -0.762731, -0.249267, -2.240785, -2.439840, -0.962278, 1.386412, 0.236573, -0.113179, 1.957054, -0.561917, -0.478650, -1.572174, -1.008380, -2.675298, 0.341852, 2.328920, -1.276262, 0.605668, 2.816123, -0.020307, -1.087129, 18.897930, 1.684022, -5.963904, 2.153839, 0.326535, 1.419691, 1.839680, 0.152121, -0.443793, 0.515642, -1.226702, -2.276216, 1.966202, 3.762245, 1.711866, 0.671341, -3.111674, -1.433167, 1.465878, -4.289611, 2.021384, -1.996823, 0.042214, -0.409925, -0.922436, -1.212982, 1.138044, -1.522454, -3.863219, -2.712918, 2.454779, 2.351706, 0.824271, 1.279557, 2.515165, 0.249954, -0.514616, -1.325547, -1.099375, -1.732998, 1.701366, 0.903615, 1.821438, 0.362362, -1.209264, -0.406613, 0.343815, -0.926961, 2.383206, -0.131069, 2.882418, -0.794012, 1.588547, 0.142149, 1.719791, -0.516556, 1.390384, 1.157517, -2.705374, 3.152814, 1.891767, -1.230698, -2.227765, -0.907071, -2.255760, -3.713498, 0.391423, -0.435654, -2.571611, 0.210851, -2.287234, -0.744805, -0.057202, -4.453759, -0.717052, 2.153277, 2.111250, 0.667674, 2.024823, -0.369146, -2.738459, -2.051172, -1.193694, 3.160493, 1.187114, -0.628924, -1.819261, -4.691652, -1.558387, 0.826536, 2.190940, 1.007532, -0.518849, -2.375483, -3.083109, 0.717804, 0.421144, 5.876964, -1.385087, 0.068475, -0.233602, -0.143602, -0.377704, 1.460576, 1.612279, -9.952719, -0.860141, 0.009690, 3.998900, -2.104997, 0.633108, -0.447114, 1.322761, -1.092791, 1.379224, -3.249701, 0.721551, -1.894614, -0.110262, -0.938271, -2.394038, -0.695555, 2.551907, -0.666710, -1.585857, -0.769654, 3.607650, 4.665731, 0.014892, 0.578692, -0.946742, 0.955832, -1.183421, 1.518056, -0.766180, -1.134929, -5.823644, 0.987941, -1.487348, 2.554693, -0.725647, 2.227943, 1.358883, 0.959986, -1.001250, 2.546376, -1.055410, -0.220511, -0.629391, -1.756114, 
0.999128, -0.474006, 0.172467, -0.361360, 0.087746, 0.241353, -1.736117, 0.641458, 1.114634, 2.208075, 1.267806, 0.002817, -2.069037, -3.187589, 1.093324, -2.561264, 2.068681, 0.762202, 0.362883, 0.582804, -1.015167, 0.106795, 2.033842, 1.161639, 0.377664, 0.452618, 3.925405, 1.386955, 3.140606, 1.267758, -1.133554, -2.524645, -0.955275, 2.711424, 0.770822, 1.372985, 3.062992, 1.510188, -0.150879, 2.115181, -0.426800, -1.095572, -0.336918, -1.500960, -0.942303, -0.226177, -0.733918, -0.031548, 0.936019, 1.037607, 2.740880, 3.736936, 0.013158, 0.783627, -2.299813, -0.794128, 1.020153, 3.677419, -1.156537, 1.287886, -0.735975, 0.271875, -0.541371, 2.015721, -1.514776, 0.217231, 1.587353, 0.698595, -4.475769, -1.661238, 1.777915, 1.495372, 8.351849, -0.992776, 2.359664, -3.734209, 0.888680, 4.039773, 1.338669, -1.540372, 2.827198, 4.282894, 0.447615, -0.325227, -5.473198, 0.158931, -2.082453, -0.471492, -0.908210, -0.229417, -0.812437, -1.082374, -2.361205, -3.133377, 2.533987, -2.056405, 0.330013, -2.951894, -0.052760, -1.753070, 0.411255, 1.419914, 1.145815, -0.162324, 0.704823, -0.556056, 1.881800, 0.478933, 1.291103, -0.936796, -4.451693, 1.578889, 0.183439, -0.686146, 3.263305, 1.043620, 2.137517, -1.800166, 4.166996, 1.393071, 4.382342, -3.738013, -0.384553, 1.550048, -12.476042, 0.659573, -0.358259, 1.873343, -0.827457, 0.601567, -1.043456, 3.390819, 0.940549, 0.042124, -0.425845, 1.186888, -0.495413, -0.849585, -1.485713, -2.023389, 3.484549, -1.132264, 2.287027, -0.184956, 0.862702, -4.589354, 3.184383, -0.465502, -0.568603, 1.523978, 1.301721, 2.099811, 1.112086, -1.031363, -5.436310, -1.959793, 2.574366, -1.392580, -0.500089, -1.667079, 0.266578, 1.470267, -2.391178, -2.849307, -2.344163, 0.151920, 0.573642, -1.668947, -1.790319, -2.520195, 1.811686, -0.149748, -2.250818, 1.122893, -2.718899, 0.765947, 0.051963, 1.497161, -2.095890, -2.668485, -0.750255, 1.363428, 0.544781, -1.652589, 3.956085, -1.285231, 3.104691, 1.370670, -0.483672, 2.409342, -1.230793, 0.431622, -2.930366, -1.572196, -1.468563, 1.024296, 0.239093, -2.253988, -0.034438, -0.042014, 0.333263, 0.990264, 0.407541, -0.725304, -0.715123, 0.676341, 0.069297, 0.102975, -0.306056, -1.024400, -0.794437, -0.089673, 0.943760, 0.686709, -3.228787, 0.669426, -5.304088, 0.056853, 1.038435, -0.478588, -0.638576, 1.946829, 2.407026, 3.066535, -1.730606, -0.888373, 2.951096, 3.153862, 0.237999, 0.558091, -2.061528, 0.783972, -0.139891, 0.005644, -0.081209, -1.382260, 1.061232, 0.716218, -2.590728, -0.990372, 1.266114, -0.707920, -3.964265, 0.948937, -4.407448, -0.231940, 0.264556, -0.267132, -3.774856, 2.497759, -0.914523, 0.970749, 3.113668, 1.660681, -0.062968, 0.503072, 1.886241, 1.214914, -0.328673, -1.813600, 0.017637, -0.110344, 3.279947, 1.179452, -2.311359, 2.184004, -1.036836, 2.163837, 0.106519, 0.639714, 2.886490, -2.178060, -0.599363, -1.816274, -0.672925, -1.578791, -0.505086, 4.701667, -0.958578, 5.368207, -0.715092, 1.727489, 2.659025, -1.855298, -2.369414, -4.724561, -0.572714, 1.991303, 2.016687, 1.089363, 3.728837, -1.101324, 2.744128, 2.082385, -0.082763, 0.874934, -0.554436, 3.319520, 0.412499, -0.356465, -0.818570, 2.214372, -0.868555, -5.018708, 1.333610, -1.507039, 1.224960, 9.023772, -0.491060, 1.643431, -1.231728, 3.141759, 0.249819, 0.606284, 1.929034, 0.079260, 0.980546, -2.081781, 0.985507, 3.136811, 1.951684, 1.112473, 3.010893, 3.541259, 3.218399, 1.626346, 0.683307, 1.771878, -1.257802, -0.757362, 1.514842, -0.636001, -0.430890, 1.693416, -1.821938, -0.486594, 2.916702, -4.420597, -0.468898, 
1.278626, 1.723206, 2.731259, 2.001235, -0.968886, -0.304650, -1.206299, -2.135083, -0.916858, -0.571528, 0.047041, 3.504944, -2.133256, 0.494332, -0.482203, -3.457354, -1.942995, -0.520856, -0.325685, 2.403196, 1.200350, 3.393925, -2.494207, -1.327450, 2.736926, 1.736917, -2.051530, -0.506557, 0.020822, 2.461713, 2.484358, -1.360792, 0.210698, 2.571049, -2.315030, 4.404229, -2.766439, -0.356564, 0.609894, -0.005560, 1.713071, -0.469658, 1.197658, 0.040987, -2.285223, 1.484401, -1.409689, 4.578481, 2.506946, 0.425305, 2.446197, 3.263529, -3.631989, -0.371354, -0.413407, 4.208085, -0.127080, -1.246227, 1.026809, -0.691294, -2.296336, 0.541863, -0.415415, -2.213721, -2.229713, 5.652533, 0.325638, 0.895194, 0.288386, 2.395745, 1.893746, 1.656826, -1.891253, -0.981483, 0.977041, -1.021206, 1.278371, -0.601382, -0.586926, 1.910359, -2.599929, 2.117822, -0.329484, 2.511219, -2.765614, 1.057920, 4.557327, -1.286663, -2.828605, 1.715522, 0.892571, 1.328999, -1.452793, 3.062454, -0.846668, 2.685005, 0.747479, 2.616977, -0.771560, -1.170870, 0.398534, 0.343821, -0.425957, -1.216428, 1.275198, -0.774221, -0.835212, -0.086740, 2.298670, -0.739717, 0.506853, 0.935785, 3.649465, -2.203318, 2.277899, -0.553957, 0.137781, 0.643336, -2.148784, 1.281616, 1.584459, 0.310634, 1.217097, -1.326976, -0.676382, 2.128299, -0.981715, 0.061699, 2.889244, -0.561955, -0.823019, -1.973488, -0.304435, 0.426746, 1.849452, 0.824656, -0.400681, 1.364501, 2.863911, -1.468426, -2.614557, 2.445089, -0.290599, -0.084631, -0.135070, 1.321539, -1.531789, -0.796952, 1.839522, 1.379722, 2.139342, -0.658808, -1.957961, 1.888349, -0.784370, -1.480167, 0.501366, -2.519140, -1.949068, -2.253042, 0.033506, 0.485347, 1.676559, -1.090220, -0.454366, 2.439051, 4.116050, 0.226810, -1.543618, 0.369009, -1.356788, 1.256831, 2.512082, -5.875708, -0.840281, 1.064519, -1.075835, 0.661105, 0.859369, -2.564416, 0.931348, -0.657630, 0.169598, -0.473798, -1.840275, 1.967578, -1.207092, -2.994502, 0.979585, -2.226941, -0.148251, -2.205890, 0.192183, 0.761379, 0.832209, -0.757570, -0.044144, -5.811568, 3.817863, -0.739124, 2.566487, 1.508290, 1.364889, 1.391061, 1.753602, 2.484661, 0.561939, -2.577687, 1.308320, -0.658141, -0.613247, -1.009616, 3.694613, 3.931254, 1.646078, -2.630128, -4.893660, -2.893271, -0.949201, -0.252491, 8.096486, -2.053660, -2.715685, -0.488898, -0.369804, 1.844169, -0.557177, -3.507263, 0.625425, -0.046343, 1.116201, -2.843462, 0.430182, -0.984149, 0.188606, -4.804567, -3.662477, -12.423755, 0.505854, -2.556453, -0.261161, -2.620847, -1.616541, 0.459177, 0.800119, 2.372884, 1.032130, -0.833381, 1.391556, -0.065205, -0.212230, -1.258538, -1.861240, -0.342623, 2.821825, -3.306765, 1.124912, -2.588235, -1.198842, -2.072963, 1.051923, 2.765494, 1.943674, -0.649781, -0.090553, 0.010493, -1.542902, 7.146881, 2.114514, 2.671738, -2.078392, 2.603645, -4.148600, -2.695807, 3.128589, -1.877043, -0.506973, 3.315228, 3.669570, -1.232590, -0.327188, -1.176315, 2.137116, -0.240232, -1.170810, -3.713598, -0.264318, -0.798505, 2.017999, 0.936734, 2.098501, 1.205739, 2.284950, 0.970075, 3.022959, -0.798790, 0.537358, 3.912182, 1.456274, 0.925978, 0.233295, 0.655816, -0.774384, 0.546590, -7.141086, 2.124636, -1.520990, 0.233711, 2.825353, 0.444932, 2.467602, -0.684698, -2.486389, -1.738088, -3.114126, 2.726388, -1.019151, 4.807260, -0.181204, -1.084334, 1.163428, -0.109641, 0.964845, 0.559943, 0.336826, 0.089202, -0.693184, -0.897094, -0.925852, -0.158974, 2.087272, -0.549617, 1.179534, -0.658688, -0.299359, -2.097598, 0.542013, 1.209597, 
0.868445, 3.104147, -2.820510, -1.204089, 3.737103, 0.770909, 3.525891, -1.135699, -1.655476, -2.240339, 2.525290, 1.211621, -0.105237, 0.486678, 0.251381, -2.101022, -4.316065, 1.071823, 0.767139, 0.222749, -1.501596, 2.875869, 2.010502, 2.222627, -2.201579, 1.083319, -1.388173, 1.759563, -2.029091, -1.098797, -2.860517, -0.240031, 0.644087, -1.932882, 1.401318, 2.215424, 0.366056, 0.841219, 0.509652, 1.678419, 0.370270, -1.852237, -0.313416, -2.199522, -0.294784, -2.797149, -1.652146, -0.632833, -4.488834, -0.143081, -6.097857, -5.642374, 1.487491, -1.104113, 0.571834, 0.434285, 2.254967, 1.653841, 2.349760, 2.489481, -2.731234, -0.345902, -3.468615, 0.002949, 1.483104, 1.326606, 1.378591, 0.229062, -0.190730, -2.039232, 0.562338, 0.645633, -2.672769, 3.342596, 0.399291, 0.816063, 1.965873, 1.918716, -1.087965, 0.048307, -0.756520, -2.151526, -1.697711, 0.347615, -3.180706, -0.146576, -2.279339, 3.271249, -4.460975, 2.981601, -2.770810, -0.264376, -0.206536, 0.586737, -1.217057, 1.638721, -0.420798, 1.459608, 2.833084, 0.065077, -3.276213, -4.026783, 1.695028, -0.414748, -1.509182, -2.335618, 0.645046, -4.259199, -0.605817, -2.499427, 0.038157, -0.863183, 2.061970, 0.611956, -2.115386, -3.359080, -1.596068, 0.307425, 0.191455, 0.361744, 0.899717, -1.701859, -4.992336, 1.584893, -2.895921, 0.149289, -1.404445, 0.501034, 2.682879, -0.675067, -0.786691, 1.069297, -1.640006, 0.099055, 0.312075, 1.645067, -0.090822, -2.817954, 0.371475, 3.697231, -0.769372, 1.777448, -2.683017, -0.970097, 3.940720, 2.429331, -0.532402, -0.217043, -1.411375, 2.891897, 0.184554, -1.693691, 0.541098, 2.303737, 0.010059, -2.598566, -2.101953, 1.095212, -0.516849, 1.995777, -3.868278, -3.872117, -2.956234, -2.438829, 1.945905, -2.324775, 1.419934, 4.604033, -0.205442, -1.559651, -1.537539, -0.980349, 2.164563, -1.639275, 2.282555, 0.196895, 1.739233, 6.665299, 11.612860, 2.788843, -1.838532, 0.987776, 0.625764, -1.816129, 1.568187, 2.474130, 0.996020, 3.790505, -0.408657, -0.948997, -3.530251, -1.080017, -3.417130, -2.058712, -1.956407, -2.989211, 1.230417, -1.237799, 1.329787, -1.045655, 0.436384, -0.770900, -0.847061, 0.136578, 0.058188, 3.483011, 2.304829, -0.042124, -0.562030, -0.265483, 3.568855, 2.773987, 0.164709, -6.854160, 2.262440, -1.915387, -0.935466, -2.306059, 1.185262, -2.824844, -1.654353, 3.939613, 3.268545, -0.325672, 1.448018, -0.088753, 1.695897, 0.997020, -0.638259, 5.060489, -1.404778, -0.066517, -1.533605, 1.931493, 1.549286, 1.261844, -0.025092, -5.829014, -1.885451, 0.096401, -0.849005, 1.323331, 1.640723, 2.387309, 0.977560, 1.003233, -2.851422, 2.125631, -0.622645, -0.251111, 0.108200, -0.860243, 2.165220, -1.747716, -1.526603, -0.160664, -2.234724, -3.378771, -1.829369, -0.828802, 1.092334, -2.838180, 2.241061, -0.070938, -0.125325, -0.533843, -0.900653, -3.907611, 0.291193, 1.380849, 3.148268, 2.248498, 2.641789, 1.162023, -1.307409, -7.452969, -5.762230, 0.570644, 2.225794, 2.268607, 2.220694, -1.868391, 1.590621, -1.087466, -1.318701, 0.355766, -2.192402, 5.148515, -1.260565, -0.340788, -3.066326, -4.802943, -1.822399, 2.027146, 2.033635, 2.484090, 0.864079, -2.444166, -2.060366, 0.691616, -0.893325, 1.614154, 2.483134, -7.538818, -2.377872, -0.144321, -0.343106, -0.466051, -1.690653, -3.003664, -0.220598, -1.996093, -1.322911, -1.798978, 0.656198, 1.923437, -0.542048, -0.335357, -0.584021, 0.888298, -1.122800, -1.792932, 0.603619, -1.189901, 0.216393, -2.956912, 0.420574, -1.393399, -1.954148, -1.921767, 4.165194, -1.175791, 2.798207, -10.199874, -0.837972, 1.866549, 4.939780, 
-1.918338, -5.765625, 1.894684, 2.931093, -2.691194, -1.582052, -0.238202, -2.357203, 3.227861, 2.676049, -1.514403, 0.610364, -2.333433, 1.532472, 3.116742, 3.468613, -0.903491, -3.069241, 2.971029, 1.897196, -2.358795, 1.614014, -3.038753, 0.296462, -3.413616, 1.476674, -2.205294, -0.426963, -1.149141, 0.687046, 1.459785, -1.579808, -0.128799, 1.095678, -4.413157, -0.583210, -0.037564, -3.217687, -3.874143, 1.367258, -0.658092, -4.242138, -1.566653, 1.616565, -0.231850, -0.561782, 0.175397, 0.873898, 2.102082, -0.481464, -1.669100, -0.535181, 1.875431, 4.580070, -2.197358, -0.910670, -0.982496, -2.303578, -1.195334, -0.761116, 1.036271, 2.430554, -1.472700, 0.805644, -0.503828, -0.386881, -0.879527, 0.931079, -0.833261, -0.988179, -1.804157, 1.147231, 2.970528, 0.660973, 1.180153, 1.487581, -2.030739, 1.501949, -0.520467, 1.889769, 1.071903, 0.089176, 0.463579, -1.126580, -1.210711, 0.082390, -1.625102, 0.874795, -1.195674, -3.522628, -1.638729, 3.287249, -1.166453, -0.828902, -1.406641, -4.649971, -0.544490, -1.771114, -0.274173, -1.582270, -1.786603, 3.387163, -0.859107, 3.550398, 0.630316, -1.103999, -2.080497, -2.685435, 4.383327, -0.171877, -0.829238, 1.843727, 0.737404, 1.495080, -0.509761, -0.942684, 0.598197, 0.137417, -2.112840, -3.016319, 2.016804, -0.613345, 2.634976, 3.474667, 2.299239, 3.921109, -0.259313, 2.293547, -0.668590, 2.097881, 0.135472, 1.272800, 1.023938, 3.750861, 3.546648, -5.987249, 5.189409, -2.012692, -2.080787, 3.648663, -1.718726, -3.059613, 0.914977, -0.070629, 2.184004, -0.554282, -1.572647, -4.378010, -1.895564, 2.295761, -1.807297, 0.939028, 3.319588, 0.981454, 1.846804, -0.074813, -0.959925, 1.180125, 0.214293, -1.340116, 2.026832, -0.275083, -2.391880, -2.757796, -2.484968, 0.950552, -1.653520, -1.723869, 1.662331, 0.535817, 0.273211, -1.151067, 2.993492, -0.075704, 0.320338, -1.407653, -1.041616, -1.881412, -2.501787, 1.301393, -1.517647, -2.464800, 0.252449, -1.997724, -3.026290, 0.212039, 1.502458, -4.246790, 1.029287, 2.246881, 0.741828, 0.158541, -1.028801, 2.529005, -0.893468, 1.443282, -0.346494, 2.669097, -2.864852, 3.209790, 3.041502, 0.020234, 0.115243, -2.031523, -0.282542, 0.396332, -1.363705, 3.021288, -0.318689, -1.363607, 0.955074, 0.509633, 2.074913, -1.201262, -2.390836, -0.396984, -1.613606, 0.360301, 1.139240, 0.839843, -0.285194, -0.322745, 0.437023, 1.281257, 3.713523, -2.683018, -2.837252, 2.113353, 0.791203, 2.814537, -2.278843, 0.702416, -1.589167, 1.516128, 0.348960, 1.021993, -2.720528, 2.671391, -1.391619, 2.161263, 1.234758, 3.476467, 2.706942, -2.214298, 0.062933, 3.817600, -1.251961, 3.566387, 1.513068, 0.747667, 1.597302, 3.858117, -1.373156, 0.729746, 1.749554, 2.064092, 0.612888, -1.871164, -0.993573, 1.980641, -0.394882, -0.771789, -2.300255, -1.864526, -1.332325, 0.679321, -1.628580, 1.684532, 0.794181, -0.341031, 0.865944, 1.570801, 3.436136, 1.568980, 0.631498, -0.048965, 0.418611, 0.775022, 4.532741, 0.286989, 1.137858, 0.944515, -2.167217, 3.400816, -2.120444, 1.896021, 0.739563, 1.800057, 0.976187, 2.241848, 1.109004, -0.139124, 2.273750, 2.243190, -0.000260, 7.102096, -0.981638, 0.517041, -0.106855, 1.611364, -0.680890, -1.851746, -0.071771, -0.558747, -0.778105, 1.001288, -2.798460, 4.107984, -2.121557, 2.265193, -0.130054, 1.358769, 0.994828, -2.313165, 0.441207, -0.300783, -0.672292, -1.479555, 3.422815, 0.001658, 0.206383, 3.627066, -1.661040, -1.218103, -1.546490, 3.201838, -0.425097, -1.208788, 1.069553, 1.511283, -2.078781, -0.688469, -2.191390, -2.106024, 1.625274, 2.402612, -0.825243, -4.379784, 
-1.812769, -0.058542, 1.346276, 0.333447, 2.418711, 1.475576, -2.620394, -0.280505, 1.131126, 0.607015, -1.094618, -2.319695, 1.346097, 2.262267, -1.525486, 1.444179, 0.501193, -1.753824, 0.733545, -2.394020, -1.725506, 0.408842, 1.683417, -3.182556, -0.488479, -1.059273, 2.058362, 3.142289, -11.317917, -2.676118, 2.479192, 2.741839, -1.815452, -1.900564, 3.863253, 0.957858, 1.753211, -2.094190, 1.561213, 1.603490, 1.401700, -1.423863, -1.557342, -1.070505, -1.258466, -2.814864, -0.233111, 1.705125, 1.368987, 0.674738, 1.317454, 1.040991, 0.379262, -3.057135, 1.303494, -0.719236, -0.150981, -4.246114, 0.012882, 2.754486, 0.887157, -2.154360, 0.434403, 2.080772, 12.592675, -1.400895, 3.682422, 18.500687, -1.824731, -1.229960, -1.306891, 4.426475, 4.153411, 0.296232, 0.334915, -0.198403, 0.207817, 1.373839, -1.809203, 3.648643, -3.357051, -0.607143, 0.804729, -2.215043, 1.579057, -1.446862, 2.116117, 0.022489, -0.106560, 3.259476, 2.179984, -0.827409, 0.466238, -0.174084, 0.749496, -2.032082, 0.819768, -0.012092, -1.404604, 2.040248, -4.373999, 2.905264, 0.524823, 3.290520, 1.260755, -1.269325, -0.546155, 0.301613, -3.231517, -2.633416, -0.817336, -2.118700, -0.650470, -2.887156, 1.295805, 1.893788, -0.623003, -0.415323, 3.246930, 0.012536, -2.414260, -1.570900, -0.459119, 2.363632, -1.078479, 0.124088, 3.440009, 1.118437, -1.124332, 4.910594, -1.549336, -1.430183, 1.136268, -2.112774, -1.333619, 2.121091, 2.231785, 1.296702, -0.906199, 0.965406, 3.869365, -1.652183, 0.759212, -2.980289, 0.682221, -0.984810, 0.634504, -1.211771, -2.491594, -0.945581, -2.850454, -0.138457, -0.431604, -0.632796, 1.385889, -0.483601, -1.077885, -1.354634, 1.187483, -0.412081, -0.877794, -0.520237, 0.527379, -2.126064, -1.391075, -0.027463, -3.005242, 0.156661, 2.754908, -1.378801, 0.082020, 0.123864, -0.290277, -3.230394, 1.301592, 2.213902, 3.094155, -1.755002, -2.322603, 0.064931, -1.328647, 0.874909, 1.640637, -0.022136, -1.437375, -0.168740, -1.580431, -1.466243, 0.074297, 1.542410, 0.640596, -2.803895, 0.838751, 1.192846, -2.151078, -1.352274, 4.536688, 0.652296, -1.681036, -1.748077, 1.116713, 1.505466, 1.093051, -0.850750, -2.080233, -1.093351, 0.568372, 1.615424, -2.187084, -1.067516, -2.221610, 0.670807, -0.322722, -0.557603, -2.128840, 2.599048, -0.295556, -2.070664, 0.996682, 2.272573, 0.704867, -0.482912, 0.789556, 2.423104, 1.283592, -1.183174, -1.525468, 0.091626, 1.759403, -0.894384, 1.708647, 17.310020, -3.481026, -5.126164, -0.462365, 3.699655, -4.189553, 0.381989, 1.898774, -0.363996, 0.458372, 1.472605, 2.367271, 0.744145, 1.278426, -1.720926, 0.309616, -0.775783, -1.905312, -0.546735, -1.651521, -1.946627, 0.678068, -0.779717, -0.981420, 2.827802, -2.157915, 0.100543, 2.626758, -0.105437, -3.918166, 0.863819, -3.190261, -1.860274, -3.934666, 6.241655, 0.965956, -2.913852, 0.003365, 1.201703, -2.250198, 0.700441, -0.116006, -2.632267, 0.289158, -1.789185, 0.158896, -2.259541, -2.592633, -1.587397, -3.143592, 0.232405, -0.945033, -2.211038, 2.132655, -12.288932, 3.241026, -1.518620, 1.077391, 1.059972, -1.065658, -0.746511, -1.032448, -0.192777, 0.732772, -1.637340, -0.360361, -1.155054, -0.207818, 1.369705, 2.599172, 2.482543, -1.180554, -0.888696, 2.530247, -0.034697, 0.959176, 1.193800, 0.326233, -1.076948, 3.587785, -0.672961, -4.438885, -0.397180, -3.531070, -6.830092, -0.541381, 0.651230, 0.792656, 1.801674, 0.653098, 0.020145, -0.930110, 1.829328, 0.417755, -1.133758, -0.575342, -0.281619, -2.819133, 0.293893, 1.141983, -1.487329, -0.180050, -0.480632, 0.239821, 1.104678, -0.611782, 
-2.685412, 2.376577, 1.076170, 1.586790, -0.723516, 0.346962, 2.835624, -0.571163, -0.018835, -1.004567, 1.764784, -1.652679, 0.181482, 1.126542, -1.868064, 0.287467, -2.103253, 2.532168, 4.365986, 0.254799, -0.283266, -2.060601, -1.498800, -2.974444, -1.227938, 2.405658, 0.629228, 2.409974, 0.530177, 1.364315, 4.653522, -0.811898, -1.583435, -1.069651, -2.269577, -1.162140, 3.601405, -0.490714, -1.997232, -3.724590, 3.772781, -2.163326, -0.496667, -4.260191, -1.616646, -1.990680, 3.304270, -2.352931, -1.146297, -6.734396, 1.459528, -0.291793, -0.521500, 13.051759, -2.432004, -2.658777, -1.149259, 1.436345, -0.751880, -2.189231, 0.615586, -2.498409, -3.291502, -2.095250, 1.040146, 0.714031, 0.970416, -0.616222, -2.000639, 1.994921, 2.395315, -0.561833, 0.121436, -1.408910, -2.248885, -1.112001, 1.088048, -0.414269, -0.435527, 3.829697, 0.528129, 2.375221, -0.519734, -0.971582, 1.406152, 2.238408, 2.720003, -0.267679, -0.791799, -0.090492, 1.500209, -1.946325, -1.748135, 6.836010, -1.218125, 0.738863, -1.162376, 1.206608, -0.377882, 1.355038, 0.775188, -0.811568, 1.407529, -0.604065, 4.010166, -1.165298, 0.672806, -5.079141, 0.690641, 0.379462, -1.214549, -1.517621, 1.051511, 4.106417, 0.001753, 1.941271, -0.016778, -0.369193, -0.438197, -3.510782, -0.501950, 3.121536, 0.598490, 3.841994, 0.205602, -1.246792, -1.616723, -1.143543, -2.465981, -2.489971, 2.632675, -1.313524, 0.345787, 0.992921, 0.425921, -2.188962, 0.680292, 0.194884, 1.859877, -0.628118, 0.726368, -2.322854, -1.007814, 0.255692, 0.737325, 2.278359, 0.366714, -1.926799, -1.290199, -0.082554, 0.797631, 3.216966, 0.944166, -3.854357, 1.882313, -0.757823, -2.803020, -1.485754, 2.979990, 0.822588, -0.639839, -0.333204, 1.326457, 0.428489, 2.403607, -0.028208, 1.258138, -1.414546, 2.595982, -1.312079, 1.871267, 3.542688, -1.192679, -0.747623, 0.372664, 0.500303, -2.334963, -5.068396, -1.425156, -2.123881, -1.776343, -0.095469, 1.043374, 1.254840, -0.709997, -0.800484, 0.942643, -1.944042, -0.205155, 0.038740, 2.047468, -1.089243, 2.242003, -1.200643, 5.502473, -0.559836, 0.693430, -0.248119, 3.774752, -1.164453, 0.814244, 2.204838, 3.310038, -1.259518, -1.539988, 0.631875, 1.811592, 1.660107, 1.233070, 1.175259, -0.912320, -3.419650, 3.499059, -2.343933, 2.245733, 0.961249, -1.360731, -2.247202, -1.547391, 0.428645, 3.232787, 0.094637, -0.361900, 0.301173, -0.484404, 0.424156, -1.495115, 2.174474, -4.887064, -3.342853, 2.512707, -5.732465, 2.330478, 0.607593, 2.459414, 2.199535, -0.540271, -0.431682, -0.428440, -0.331239, 7.473131, -0.676626, -1.463685, 0.149552, 0.061583, 1.886732, 1.978416, -1.099899, -2.279263, 1.284642, -2.398491, 1.477496, -0.910583, 1.879131, -0.457095, -0.015456, 0.256275, -0.456831, -3.490171, -0.004046, -0.551340, -0.684976, 0.764194, 0.847659, -3.879746, -3.626539, -1.568061, 0.991960, -0.737746, 2.614909, 1.940545, -3.691171, -3.245214, -2.179925, 2.383390, -1.783122, -0.506950, 1.127885, -0.890014, 1.435303, 3.048434, -1.386346, -1.076663, -2.896395, -1.343953, -2.565495, -1.381629, -1.268511, 1.228591, 0.993910, -2.575418, 0.648286, 1.532965, -0.935785, -1.605901, -1.246196, -2.201324, -0.857666, 3.537265, -3.682135, 0.672675, 0.951825, -0.827279, -3.059412, 4.575532, -3.718941, -0.977951, -1.166684, -1.237993, 3.090737, 1.565580, 1.461544, -1.929372, 1.394221, -3.346243, -0.226639, 1.119265, -1.345803, -0.899012, -2.456377, 1.593067, 0.910089, 1.503470, -1.812514, -1.400435, 1.177375, 0.096689, -0.294497, 0.204516, 0.525131, 1.091468, 0.787260, 0.021002, -1.185784, -0.505473, -0.187676, -1.750899, 
-3.509349, 2.650220, 1.842912, -1.387142, -2.165509, -0.041363, 0.090037, 3.887540, -2.342398, 0.609889, -0.786447, 1.820952, 1.236354, -1.621533, 0.662158, 2.572353, 0.277775, -0.603314, -1.811826, -0.732945, 1.049378, -2.206546, 1.959944, -1.303532, 0.861363, -0.612073, 0.364254, 1.629579, 0.628865, -0.018568, -3.558367, -0.744735, -1.004377, -0.180408, 0.018036, 0.942299, 2.755231, -3.410764, -1.202904, 1.628659, 0.269436, 1.631451, 2.589946, -0.040498, 1.630141, 0.496223, 1.869666, -0.404186, 1.478820, -0.379239, -3.533465, 2.040182, -0.441357, 0.470875, -0.983391, 1.173682, 1.085656, -0.516549, -0.635032, -0.766274, -2.671732, -1.281379, 1.456411, -0.915409, -3.183290, -0.204772, 2.401025, 2.152182, -0.233675, 0.305950, 2.888514, 1.930429, -1.184927, 1.662104, -0.271050, 1.988250, -0.984551, 1.636173, -0.936373, -0.207487, -3.862692, -0.202516, 1.630704, -0.943317, 0.691296, -1.044768, 0.531007, 0.471725, 0.821349, -1.234496, -1.380861, -4.413808, 0.437657, -2.809109, 0.977942, 0.700759, -0.938202, 2.930912, 0.906982, 0.537006, -0.933399, -0.012241, -0.700445, 1.411654, 1.602838, 0.170198, 1.034546, -0.294352, -0.277783, -3.215640, -0.310807, 1.199179, 0.741775, -2.316380, -0.154027, -2.767129, -2.038565, 1.119279, -2.533041, 0.076981, -0.187397, 1.210819, 1.391541, -2.467751, -0.143722, -0.905126, 2.260527, -0.993221, -1.232143, 1.834083, -0.804748, -1.330043, -0.310126, -2.712278, -2.764476, -1.708571, 1.639717, 9.467136, 1.056757, 2.668218, 0.011055, 0.992315, -2.199364, 2.513154, 2.435208, -2.443968, 0.233445, -2.318432, 0.602261, 0.931487, -2.197106, 2.200380, -1.881728, 3.087734, -0.060384, 1.490917, -3.820469, -2.404525, 0.082236, -1.416263, 6.533288, -1.271866, 0.874339, -0.064727, -0.294185, 0.155135, -0.258834, -3.481010, 2.522374, 1.355910, -1.698231, -2.581586, 1.589780, -1.474311, 3.495509, 0.679703, -4.357119, -0.388887, 2.020856, -1.231388, 1.933426, -3.399568, -2.166835, 3.597775, -2.225888, -2.851705, -4.492297, 2.081487, 1.752849, 0.931443, 1.775176, -1.427293, 3.582803, 0.461577, 0.847037, 0.082594, -1.585400, -1.084166, -1.576557, -0.957542, -0.661302, 0.782861, -0.174181, 0.403607, -0.843751, 0.629765, -0.536972, -0.854492, 3.225008, -3.574810, -2.818572, 4.156142, 1.869532, 1.431887, -0.001005, 0.652833, -0.933383, 0.058137, -1.479301, -2.311369, -3.555691, 0.354053, 1.014045, -2.245627, -3.368932, 0.047818, -3.167487, -1.033651, 0.841527, -1.111248, 3.357744, 1.951621, 0.393261, -3.040123, -3.178878, 1.615363, -0.041975, -2.667777, 0.146986, -1.417583, -1.627755, -0.685473, -3.537882, 0.029217, -0.811283, -0.627015, -0.022089, -1.536894, -1.870402, 1.232272, -0.325790, 1.623947, 0.759523, 1.878842, 2.110918, -0.386001, 0.917781, 0.609635, -0.115149, 3.891633, -0.426539, -0.111032, -0.530549, 1.704758, -2.846103, 1.041818, 0.211156, -1.941607, 0.620646, -2.002606, 2.378968, 1.088245, 4.909698, 2.291356, -1.610609, -2.184232, -1.090413, -0.585811, 1.194546, -1.638153, -0.257659, -0.407240, 0.275248, 1.816893, -0.549051, -4.227658, -0.838440, 0.708863, 3.270518, -3.819626, 1.362298, 3.398038, 0.340970, -0.061836, -0.640408, 2.750417, 0.624174, 1.774026, -2.950428, -0.631979, -3.549893, 0.611664, -3.074150, 2.049044, -0.319013, 0.005977, -0.195755, -0.366373, -1.888581, -3.412463, 2.255754, 1.286729, -0.145309, 1.804818, 2.039304, -1.039636, 1.837572, 3.214562, -3.287841, -0.662308, 3.181302, -3.620912, -0.062886, -0.094632, 0.150107, 0.930212, -0.841085, -2.030969, 1.515888, 0.988359, 1.924312, -1.151969, 0.427673, -0.768772, -3.342158, 1.639557, 0.227930, 
-0.515050, 1.370292, 0.266383, 1.212152, -0.133667, 0.940615, -1.409638, 1.942192, 2.937795, -0.097905, -1.931062, 0.930637, 1.869293, 1.406385, -1.296549, 0.331738, -2.143225, 3.932092, 2.715863, 2.412196, 0.995437, 3.460501, -1.302808, 2.907689, 1.655309, 2.478021, -0.200031, 3.047060, -0.410674, -2.622407, 0.302213, -2.410561, -0.752223, -2.774459, 0.095784, -0.415811, -1.300381, -0.542583, -2.131463, 2.296709, 1.587769, -0.620450, -1.230258, -1.553957, 1.986467, 2.022719, 0.715672, 2.198387, -1.387220, -1.690441, -4.472634, 0.718367, -1.587503, 1.961340, 2.033993, -2.388419, -0.369963, 0.497210, -3.200239, -1.881070, -0.903583, -2.291441, -1.807256, 0.693820, -1.195831, -0.751228, -1.074330, 0.841028, 1.857826, 3.043664, -0.462683, -1.359275, -3.413938, -1.533384, 0.597736, -1.686284, -4.281181, 2.230808, -1.951093, 0.725956, 2.111386, -1.241247, -0.219312, -0.911483, 1.301086, 2.288175, 2.471712, 1.183948, 1.253952, -1.300000, -3.339571, 1.425936, -3.129603, 0.875244, -1.445471, 1.345114, -3.602144, -0.701855, 1.367753, 0.410516, -3.014481, 0.615465, -0.351461, -1.487927, -2.222125, -3.888845, 2.094670, -1.978802, -0.517767, 0.058138, -0.444669, -2.294036, 1.998730, 0.297695, 0.018321, 0.268706, -0.879354, 0.274088, -0.445150, 2.828982, -0.520435, -1.824163, -3.204499, 0.636438, 1.238371, -0.863154, 4.537728, 2.181285, 2.711806, -0.882168, -2.096454, 1.998263, 3.602588, 2.114964, -1.150518, -0.076333, 1.211806, -2.149293, -0.410827, 0.744497, -1.594841, 2.430060, -3.093566, -2.016082, -2.308710, 2.795972, -2.289312, -0.833563, 0.530762, 2.017911, 1.745501, 3.915513, -3.470293, -6.470500, -0.332230, 0.011448, -0.394733, 2.631524, -1.241708, -0.602433, -2.349052, 1.143003, -1.018770, -0.317715, 1.176016, -0.424689, 0.303274, -2.129854, -0.518942, -1.190350, -3.030563, 2.986264, 0.322974, -1.171382, -2.080170, -1.518298, 1.755744, 2.481019, -1.135633, 1.196923, -2.293517, -0.416247, -0.419748, 2.791564, 2.139885, 3.192137, -0.693876, 0.322415, -3.658330, 1.089203, 1.927066, -3.449436, 0.115316, -1.598851, -2.275302, -2.185232, -1.625353, -0.050324, 0.472127, 1.869982, 2.535981, -4.294853, -2.277530, 1.433655, 1.217977, -0.048806, 2.771290, -1.207959, -2.310196, 2.591723, -2.335873, -3.282704, 1.380378, 0.943002, -3.358639, -0.853271, -2.042148, 0.580582, 0.059815, 2.079012, -2.376285, -0.196453, -3.680857, 0.270507, -1.599318, -0.332833, 1.543115, -0.472989, -0.988075, 1.807841, -1.925379, -0.623305, 6.014649, 0.391908, -1.238512, 4.130530, -3.661225, 4.244806, 1.479349, 0.690953, 2.257133, -0.744723, 6.228973, -1.201092, -0.397441, -1.781538, 6.628207, 4.155041, -0.528522, 0.690781, -1.653167, 2.763517, 1.429862, 0.232714, 2.562456, -1.089225, -1.969519, -2.903962, -1.258486, -1.445670, -0.693330, 2.552694, -0.960994, -2.646074, 1.737455, -0.539961, 2.642196, -0.004014, -1.921011, 0.863813, -0.337553, -0.190130, -3.074884, -0.279881, -2.351591, 0.643860, -1.237673, -2.046865, 0.629775, 0.710130, -11.075646, 0.213202, -0.315120, -1.107641, -3.423430, -0.593594, -2.706415, -1.796721, 1.453879, -2.855584, -4.217345, -0.273067, 0.940133, -1.611402, 1.074368, -0.007585, 0.811808, -0.326986, -1.716057, -1.484485, 0.644776, 1.441688, -1.257548, -0.016782, -8.116377, -1.092561, 2.480797, 2.979291, -0.185106, -1.670235, 2.845420, 0.410733, -1.724248, -1.519051, 1.600042, -3.040879, 0.611754, 2.122114, -1.111939, -1.519439, -0.995059, -1.118172, 1.793956, -0.745843, 1.341350, 1.434149, 2.423525, 1.703777, 0.244239, 4.135608, 0.343924, 0.921309, 1.121611, 0.995589, 4.442313, -0.761273, 3.066107, 
-2.289390, 1.500767, -3.724264, 1.227392, -1.030693, 0.063110, 2.629669, 1.274395, 0.414828, -1.593909, -1.180849, 1.649198, 0.442606, 0.564936, 1.496706, 1.280906, -0.746933, 1.430545, -1.602895, -1.896697, 0.114705, -3.046932, -2.504452, -0.288728, -0.457942, 2.345242, -0.296523, 2.460886, 1.491791, 1.460493, 2.949695, 0.646835, 1.938821, 1.221826, -3.029753, 1.027855, -1.815510, -0.346786, 0.590048, -2.604692, 2.580613, -5.464456, -2.245464, 2.986708, -1.448367, -2.161349, 3.038980, -0.680506, -0.707855, -0.048277, 2.409130, -3.707219, -1.432573, -1.219632, -0.352755, -3.242210, -2.011038, -1.698111, -1.534498, -1.411064, -1.235086, -3.260119, 0.751513, 2.037949, 6.087719, -0.354985, 2.296287, -0.823595, -2.583332, 2.302197, 2.243156, 0.054603, -0.797066, 3.940995, -0.038713, 1.843858, -3.207862, 0.474149, -1.900639, -0.749204, 1.216251, -1.687184, 0.714031, -0.671786, -4.782870, 0.280564, -4.122239, 0.000554, -1.740705, 0.414890, -0.243921, -0.324114, 0.220153, 0.294286, -0.331063, 1.128937, -0.042819, -2.156856, -2.752139, 1.753495, -4.386309, 2.646769, 1.862037, -0.717543, -1.894679, -0.222581, -1.100053, 2.620292, -2.685768, 3.412238, -3.131670, 2.225328, -3.709669, -1.838988, 1.399511, -0.762291, 1.787900, 2.572352, 3.131334, -1.663082, -1.124640, 1.764323, 3.182696, -2.977489, 1.514403, -0.245818, -2.238850, 4.151056, 0.036864, -0.186692, -2.001691, 1.246032, 1.557085, 0.019165, 0.828518, 3.140849, -0.298136, 3.447491, 2.868086, 2.243513, -1.050800, -3.414328, 3.056563, -3.797553, 0.326561, -2.106754, -1.037465, -0.276275, -2.951207, 2.119463, -3.765703, 2.635464, 1.421755, -1.351970, -3.440002, -1.215871, 2.068440, 0.206381, 2.305964, -1.791120, -1.052097, 0.080710, 1.619676, 2.618592, -1.497641, -4.078834, 0.682429, -0.353784, -0.943733, -1.674624, 0.674289, 1.971855, -1.487211, 0.966145, 3.574993, -1.054608, 0.972266, -1.674012, 0.498910, 2.235878, -2.991302, -0.194584, -0.462182, -2.855854, 2.037934, -1.383906, 0.133169, -1.456793, -1.011050, -1.552601, 0.229914, -0.716652, -0.209454, 3.012683, -4.222848, 0.078118, -2.066082, -3.165344, 0.016393, -2.448389, -0.584697, 2.741099, 2.008625, 0.237100, 3.490952, -2.384674, -0.600877, 4.660543, 0.360961, 0.290765, -0.756962, -2.478991, 2.993396, 0.240600, 2.667300, 1.342028, 0.278699, 2.355097, 0.520877, -0.399971, -0.270937, 0.079413, -0.333582, 0.991009, -0.277407, -0.802320, 0.877358, -0.158195, -2.252553, -0.421490, -1.147035, 1.896074, 0.303360, 0.284368, 0.645959, -1.861567, -0.189420, 2.494082, 1.622385, -0.301365, -3.662149, 4.510878, 1.651112, 0.298648, 2.755453, 0.300312, 0.577870, -2.157631, 0.022099, 4.050114, -1.113486, 1.166550, 1.433385, 0.636592, 0.365205, -1.710035, -5.819573, -0.750998, -0.700895, -0.252428, -2.303637, 0.122997, -2.406902, 0.796248, 0.074350, -0.191512, -1.336334, -0.727378, -0.217502, 0.768734, 0.026973, 1.666830, 0.837306, 2.636509, -4.413403, -3.378228, -1.265510, -0.660484, -0.527256, 0.652431, 0.810927, -0.955311, -2.194607, 3.390256, -0.676141, -2.678200, -2.246846, 4.665495, -0.300063, -1.505307, 0.372656, -0.314319, -1.683735, 1.244436, 7.202695, 0.347428, -3.931497, -0.384359, 0.070719, 1.927199, 2.370592, 1.543958, 1.292192, -1.171941, 0.515360, -1.998340, -2.632734, 1.160288, 0.932155, -2.313407, 1.186146, -1.297318, 0.411109, 2.077339, 0.377120, 3.878205, 1.169885, -0.514475, 1.380647, -0.220764, -6.186738, -0.987336, -1.161258, 0.022470, 2.944921, -3.944496, -2.454340, -1.350916, -3.333500, -3.976480, -0.813268, -0.936630, 4.470922, -1.197749, 0.999399, -0.848027, -0.556465, 0.876649, 
4.624232, 1.716997, -1.316871, 2.161342, 2.106090, -0.884313, -0.900795, -1.254095, -0.401763, -2.136899, -1.426937, -1.633984, -1.857804, 4.080161, 2.620926, 3.743167, 1.912780, 1.157939, -0.232982, 1.485714, 3.776951, -0.728675, 0.096456, 2.419091, -2.536920, 1.749187, 3.919883, 1.045194, 0.124967, 5.034767, -0.020524, -5.442020, -0.340481, 0.847831, -0.570193, -3.708969, -1.694553, -1.775758, -1.774149, 1.016557, 2.744298, -0.981895, -3.203657, 2.381986, -0.130370, -0.575154, 0.981555, -2.114384, -0.703772, -1.544474, -4.912024, 1.436929, -3.612131, -0.513180, 1.124501, 1.500321, 1.228162, 2.164581, -1.008544, 3.386724, 5.138061, 2.143028, -0.163646, 3.132955, 0.232388, 0.140801, 2.374613, -1.336730, -3.779842, -0.695596, -1.955346, -5.124892, -0.852984, 1.089569, 1.047508, -0.899278, -2.671301, 0.775844, -0.359940, 2.370226, -1.953356, 1.204961, 0.575502, 4.395379, -1.004487, -2.043077, -0.088922, 2.446170, 1.487957, -0.670360, 0.311429, -0.970048, 4.440516, 0.804933, 1.320401, -1.760813, -1.227641, -0.585840, -1.419913, 2.561329, -1.393864, -0.514215, -1.485557, 2.117904, 2.916862, -0.557396, 3.088374, -0.601427, -2.839745, -0.335838, 1.241137, -0.483900, 0.794831, 1.195930, 0.649772, 4.032490, -0.526397, 0.963251, -0.133096, -0.534765, -0.111903, -1.648276, -1.044494, 0.361883, -0.918383, -0.419664, 0.178241, -0.791128, -1.730827, 0.185987, -2.588084, -0.286288, -1.370608, -4.314114, 1.166656, -0.317811, 0.544867, -1.790729, 1.485108, 3.057710, -0.402884, -3.525095, 1.046638, 0.469837, -10.401346, -0.187911, 0.602185, 0.784814, 0.686189, -0.628216, -2.192061, -0.726722, -0.583705, -1.816725, 2.630092, 2.962842, -0.098243, -1.397867, 4.522958, -2.306017, 0.158243, 0.021326, 0.669872, 1.986373, -0.030303, -0.977807, 4.633324, 0.498535, 3.087742, 3.187632, -1.225902, -1.390914, 0.624044, 1.865394, -3.805171, -0.420694, 2.681516, -4.424584, 0.019501, 1.461479, 0.945931, -0.460550, -0.178802, 1.539152, 1.010580, 1.188666, -3.036549, -0.074547, -0.933213, 1.277559, -0.826474, -1.801009, 1.385077, 0.442225, 3.276604, -0.478880, 1.084784, 0.558545, -2.902088, 1.052059, -1.853553, 0.754707, 1.477189, 0.535306, 1.659573, 0.157024, -2.818528, 4.611130, -0.069930, 1.186805, 3.071634, -1.309555, 2.216107, 1.327526, 5.102565, -1.661079, -0.484750, 2.699060, 2.793719, 6.261371, 0.641963, -0.288440, -0.679968, -2.719732, -1.884756, 4.892308, 1.928781, 0.405360, -0.971816, 2.584059, -0.217052, -0.968458, 1.336929, 3.495005, 1.708722, 0.110665, 0.281588, -0.007339, 0.161777, 2.841527, 2.780258, 0.515291, -3.523671, 0.947459, -0.148462, -1.500179, -3.493957, -2.649297, 1.730525, 1.296232, 0.803082, -1.144793, 2.993541, 0.136460, 2.948369, 4.233619, -0.316875, -1.924663, 2.759971, 1.683363, 2.597641, 0.497635, 1.238039, 0.183853, 0.467309, 2.465760, 2.984601, -0.765347, -1.204559, 1.072209, -3.060474, -0.113188, -1.049060, -1.903181, 2.465544, 2.311688, 0.107047, -0.454180, 1.233230, -0.411675, -1.004517, -1.172773, -1.903938, 3.785788, 2.654215, -0.098252, 2.272377, -2.599964, 1.309094, 4.073755, 5.101512, 0.368129, -2.872931, 0.665823, -0.812287, -1.531622, 2.246672, 1.337333, 0.032180, -1.559512, -1.714184, -0.499598, -0.752196, -0.304698, 0.928405, 2.025278, 1.975079, -1.431077, -0.394958, 0.576032, -1.239387, -2.800098, -3.549074, -2.340450, -3.898644, -2.728451, 3.891414, 0.831638, 2.061581, 1.196748, 1.816497, 0.156793, 1.248291, 1.444935, 2.782322, 0.900815, 3.566337, 1.390690, -4.955343, 0.791208, -1.397928, 1.828276, -0.509986, -1.943480, -4.032582, -3.864328, -1.491560, 1.075016, 4.650043, 
0.521032, -0.454500, 0.902760, 3.573976, -1.945750, 1.468956, 0.404240, 0.927548, 2.688647, -1.748163, 2.081175, 2.447916, -0.936185, 0.596772, -2.287498, 0.668699, -0.389346, -4.987719, -1.310262, -0.750813, -0.004687, -1.002931, -2.260232, 2.612141, -0.452705, 3.334452, 4.471656, 0.040492, 0.043492, 1.906444, 0.762864, 0.013549, -1.428000, 0.704618, 4.283254, -0.447447, 0.242987, 1.894843, -1.150813, 1.425953, 0.571005, -1.205558, 2.250842, 1.010226, 0.975433, -0.190685, 2.520607, -0.260550, -1.565487, 2.237750, 5.694161, 1.458744, 1.499833, -0.964584, 1.126270, -4.017033, 3.835517, 3.099368, 2.741929, 3.745870, 1.924112, -2.769214, -8.153638, -1.366967, -0.222473, 3.104762, 0.408815, -3.334017, -2.308936, -2.294003, -2.051878, 0.342104, 1.393579, 0.115670, 0.384816, 2.063236, -0.901094, 0.488691, 0.358792, 0.578016, 3.229413, 0.266376, -0.348818, -4.958932, -0.798056, 0.753551, -4.482573, -2.943725, -4.519968, -3.022888, 1.796830, 0.265710, 1.506794, 0.902000, 1.777172, -1.468453, 0.287163, 4.323891, 0.919122, -0.175923, -1.098132, 0.103859, 0.678686, -1.698002, 0.703255, 0.417760, 0.247697, 2.633260, -4.531164, -1.101134, -2.511764, 0.845692, 0.613309, -1.745634, -3.350141, 1.451234, -0.067550, 2.045863, 0.066918, -0.112667, -1.870865, -1.044443, -2.118799, 1.548138, -3.268719, 3.594374, -2.292007, -0.731400, 0.638149, 1.171789, -0.541452, 2.279704, 0.189897, 2.355322, 0.354103, 0.437733, 2.829152, 1.429048, 0.933853, -0.411465, -0.497428, -1.422664, 1.682440, -0.538568, -0.112830, 1.735603, -0.933443, 2.339074, -3.122605, 1.465831, 0.569450, 1.968063, -1.178774, 1.301933, -1.622244, -4.631080, 2.460362, -1.732073, -3.284402, 2.536322, -3.206595, -0.230824, 2.381750, -0.767115, -7.129733, 0.176164, 0.331321, -0.749577, -0.663327, 0.478035, -1.901979, 1.249618, 5.354519, -3.093740, -0.885269, 0.886041, 1.691122, -1.000635, -1.291685, -0.424720, -0.564069, -4.108914, 0.154632, 0.835152, -1.706249, -0.724605, 1.825291, -2.242805, 0.980393, -0.696546, 0.399662, 2.416517, -2.199752, -0.551790, 2.116515, 0.207906, 1.171264, 0.569540, 0.002868, -1.336563, 0.920318, -1.586634, -1.307527, -1.973062, -4.813841, -0.904314, -0.464118, -1.834300, 0.694039, 2.206343, 0.832556, -2.252901, 1.668644, 0.868798, -3.234017, -0.940149, 2.856216, 23.049131, -1.368276, -1.221370, -0.546990, -0.740844, -0.585601, 3.795197, 0.829124, -1.421780, -2.747960, 3.710470, -1.213560, -1.911591, 5.013275, 0.673200, 2.373624, 5.593239, -0.657087, 0.307902, -0.746474, 1.093833, -2.048117, 1.662055, 7.558481, 5.067765, -3.247648, 0.402622, 1.452313, -2.936994, 1.637158, -0.491472, 2.239504, 1.444507, 2.413685, 0.213852, -2.163628, 3.432185, 6.881503, 1.099702, -0.305388, -0.558462, 1.285792, 0.311286, -0.607929, 1.466767, 0.773958, -0.885835, 0.165974, -3.087383, 3.033762, -0.593204, 2.427591, -1.382962, 0.657607, 1.489019, -0.537272, -0.025472, -2.250768, 2.848840, 4.054736, -0.610624, 2.775694, 0.896121, -1.135586, 3.143772, 0.262527, 2.628227, -0.285485, 1.432166, -0.431331, -2.357857, -3.904627, 1.781239, 0.233672, 0.943503, 3.086826, -11.265800, -1.675461, 1.690181, -1.013600, -1.180526, 3.663422, -0.594121, 2.752018, 0.648223, -1.827538, -0.465675, 2.111429, -1.462047, -2.965230, -0.563627, 0.869006, -0.651719, 1.894362, -3.315145, 3.054309, -0.267607, -0.312651, -0.644268, -0.030751, -0.662597, 1.378941, -2.936244, 1.098406, 0.021220, -0.233837, 0.533106, 0.207417, 0.242259, -1.860722, 2.138731, -0.163239, 0.123441, 3.316773, 3.244506, -0.519945, 0.607942, -1.150754, -2.454610, -1.068099, 1.211501, -0.863424, 
1.330000, 0.356083, 2.411207, -0.378593, -0.009602, -9.444021, -1.246032, 1.298337, -5.255346, -3.966677, -0.270956, 0.393291, 0.164479, 0.425593, 2.018335, -0.624935, -0.044252, 2.483628, 0.823812, -1.851756, -1.228499, -1.269864, -1.755970, 0.393510, -6.143491, -0.245994, -0.033852, -0.529157, 0.950466, 2.724706, 1.496825, -1.469126, 1.589233, -3.745199, -0.860516, -0.340473, 0.733098, 0.764605, -3.821471, 0.853751, 0.148301, 2.213405, 4.123124, 1.283300, -0.087948, 1.402946],
+	"qwen2.5-coder:latest":   [-0.436007, -1.314809, 0.139666, 5.148615, 1.838311, -1.521601, -3.773632, 0.581152, -0.971788, 7.219580, -0.066787, 1.198955, 1.659349, 3.621571, 1.779006, -0.560876, 0.355461, -1.897615, 2.303185, 0.109255, 6.044055, 1.446214, 0.674157, -0.692821, -1.987091, -2.048842, -1.936660, -2.349168, -3.152107, -3.172126, 0.873809, -1.490791, 0.970821, 1.441998, 2.091025, -0.651286, -2.027265, -1.386296, -0.700867, 2.765764, -1.228565, -0.294553, 0.431368, 1.284794, 0.631266, -0.846027, 16.100187, -0.651704, -0.213284, -1.980950, 0.748919, -1.524519, -3.034139, 1.428813, -0.906473, -0.359399, 0.568063, -0.806468, -0.336932, -0.204364, -2.249646, -1.886204, -2.921830, -2.873020, -2.915986, -0.558776, -5.761330, 0.710662, -14.035521, -2.583575, -0.747539, 0.809441, -0.951276, 0.723559, 1.804405, 0.222171, -1.118399, -0.074350, -11.002135, -1.436381, -1.212464, 0.143454, -2.309623, -2.390476, -0.117248, 1.324861, -1.891298, -0.915926, -0.236022, 2.277486, -1.947304, -1.112398, -0.341477, -1.563656, -1.073132, 0.275794, 2.828944, 0.914620, 2.530140, -2.008723, 1.740619, 1.433238, -0.137113, 1.737560, -2.091690, -0.144877, -1.682601, -0.273133, 0.891697, 3.031501, -0.963722, 2.925961, -0.128959, 5.033491, -2.237801, -0.329063, -0.569064, 1.594402, 0.993208, -5.571981, 3.424447, -0.005736, -1.073074, -0.499183, -1.037315, -0.981740, -0.731634, -2.787812, -2.274329, 0.616070, 2.383073, 0.642813, -0.994558, 0.252382, -4.711788, -0.271676, 3.495092, 1.964713, 1.607625, 1.494871, 1.336313, -1.587251, -1.514070, -5.126953, 0.097945, 0.919678, 2.441731, -2.928982, -0.321582, 0.855205, 1.654471, -1.153838, 4.138180, -0.955069, 1.078292, -1.350932, -4.036607, 1.604253, -0.872791, -1.828661, 2.580978, -0.709960, 6.295347, 1.293245, -1.309786, -2.217097, 0.007649, -2.884886, 1.301318, -0.435597, -0.482649, 3.068917, -1.476866, 2.451068, -1.917819, -1.498407, 0.184997, 1.833188, 0.816717, -0.623813, -2.193322, -1.512282, -1.615667, -1.072993, -4.262381, 0.010529, 1.257217, -2.236494, -4.156431, -0.603306, 2.949536, -0.846157, -0.592448, 1.623096, 0.024929, -0.389215, -1.058699, 0.131432, -1.387448, -3.724334, -0.617063, -1.108744, 0.636820, -1.469211, 2.240824, 0.869600, 4.049538, 3.993536, -0.610457, -0.671499, -3.111218, -2.991330, 0.696155, 2.608358, 1.001497, 1.474154, -3.532830, 2.660499, -1.066343, 2.081527, -1.972203, -0.570178, 1.080361, 0.286836, 0.940870, 1.073439, 0.588628, -2.507718, 2.726036, 0.290909, 1.218498, -1.227438, 0.014571, -0.629023, -0.212335, 0.783704, 4.464511, -1.721464, 8.572509, -4.255687, -0.337520, 0.154504, -0.440107, 1.332823, 6.227478, 1.853769, 0.825418, -0.090258, -0.175270, -0.294410, 2.580056, 0.677362, 2.338051, -0.415923, -1.507566, -3.802147, 0.664998, 1.294685, 0.533289, -4.037803, 1.228877, 1.029611, -2.024710, -2.125656, -0.549981, 2.533221, -2.395383, 0.313111, 0.601733, -2.608969, 0.216491, 0.406232, 0.609647, 0.711135, 2.503493, -1.242730, 0.346222, 0.542468, -3.621418, 0.442082, 0.813194, 2.384302, 0.784717, 0.717723, -1.476249, -1.676991, 0.117224, 0.378211, -0.194512, -0.284983, -3.506894, -1.025167, -0.378918, -1.602937, 1.909185, 2.551661, 2.117800, -0.885118, 0.708412, -1.859785, -0.479130, -0.738324, -2.670425, 2.509346, -1.601109, -2.061625, -1.747532, 2.314368, -2.034420, -2.830469, 0.645515, -0.335051, -1.528542, 1.276098, 1.399137, -2.062893, 3.157044, -0.355756, 0.008239, 0.529171, -3.172691, -1.092223, 4.341345, -4.409624, -0.345799, -1.003428, -4.002488, 1.766916, 1.763788, 1.652610, 0.630642, 0.222056, -1.701876, 2.214486, 
2.379781, 0.916326, 2.501186, -0.940371, 0.257316, 0.774674, 2.868283, 0.917253, 0.978641, 0.544919, 7.170478, 1.259203, -1.779769, -1.019169, 3.777537, 0.795997, 1.963864, -0.853626, -0.343095, -1.443662, 0.394236, 0.305709, -0.351705, 0.524120, -1.773943, 2.920990, -0.804305, -3.644200, 2.719176, 2.336826, 2.026261, 0.880051, 0.758393, 0.228448, -2.986163, 5.670512, -0.611930, -0.002579, -0.251571, 0.224900, -1.935357, -0.920759, 8.231796, 1.971320, 1.576411, 1.943412, -0.953862, -0.375397, 3.036218, -2.298655, 0.889425, 1.108079, 0.258971, 2.647190, -1.539036, 0.500190, -0.727908, 1.263729, -3.033226, 1.961906, 0.810480, -2.089427, 0.708266, -4.047872, -0.527808, -14.221658, 2.361469, -1.320704, 0.836442, -0.386591, -2.236613, 0.486746, 1.005684, -0.821714, -0.599291, 3.795557, -4.883873, 3.555826, 0.689591, -0.508781, 1.518971, -2.128448, 0.553367, -0.200060, 1.611065, -1.504008, -3.721884, -3.394505, 3.972555, -0.445324, -1.183890, 3.563913, 1.988339, -0.476866, 0.289509, 0.277188, -0.175615, -1.041347, 1.197368, -0.477900, -3.759550, -5.051634, -1.572159, -0.226668, 0.180172, -0.086704, 2.489282, -3.111502, -2.003207, 1.001518, -0.583694, 1.663272, -1.637953, 0.545772, 2.723981, 0.770260, 0.925456, 0.989657, 2.473933, 2.747635, 2.175160, 1.760652, -0.013310, -4.443879, 37.422813, 1.288954, 2.467471, 2.146290, 1.013962, -2.084025, 2.127673, -0.888463, -1.521253, -0.895446, 4.524404, -0.104275, -1.633331, -9.016782, 1.046254, -1.463881, 3.555183, -1.504391, 0.277646, -1.592795, 0.272081, -0.698069, 0.696722, -1.410604, 1.937863, -0.376410, -1.499335, 1.668633, 1.260768, -2.925495, -1.584171, 1.157681, -3.337044, 3.435848, 0.386049, 1.061394, 1.006677, 0.669047, -1.470088, 1.235580, -1.276175, -0.555348, -1.114300, 0.408720, 0.459978, -2.024472, 1.675984, 1.306379, -0.254271, -2.635481, 1.731658, 2.153453, 0.928214, 0.306720, -3.400702, 2.915029, 0.031397, 1.996717, -1.877975, -4.729939, -0.609253, 0.541097, 1.393276, -0.035789, -1.044343, 0.889185, -0.336026, 2.131327, -1.250257, -1.924997, -0.287107, 1.208007, -1.054186, 1.337112, 1.274661, -2.434684, 3.016158, 1.469676, -0.695139, 1.505548, 2.100273, 1.219837, -0.772411, 3.593307, 5.785521, 2.612813, -3.146140, -0.555663, 0.243268, -3.966340, 0.486868, -1.174454, 1.676140, 0.317372, 0.213547, 0.103339, -1.636872, -1.078531, 3.002460, 0.008469, -2.326029, 0.867810, 0.871880, -1.080869, -0.768073, -4.836300, 0.383782, 0.691735, -0.713520, 1.354015, 2.611808, 6.183017, -1.084267, 0.292993, 0.816155, 1.273668, -0.560137, -1.290062, 1.882704, 1.010641, -1.717645, 1.066451, 0.092508, 0.932700, 0.062565, -0.448221, -1.144008, 1.872820, -1.933916, -4.155032, 1.226316, -0.842341, -1.021217, 1.994580, -0.036678, -0.072384, 0.812870, 1.374167, -2.501688, -0.321656, 3.544239, 2.224328, 2.181247, -1.169514, -2.582330, -1.573699, -2.363550, -2.202179, 1.673287, 0.711271, 3.818364, -1.154218, -0.725751, -1.511678, 0.080292, -8.115035, -3.127221, -3.172623, -1.133258, 5.428519, -0.574502, -2.239282, 3.485080, -2.038507, 2.057469, -0.269610, 1.192541, -1.718417, 1.374550, -1.552978, -2.797242, -2.503451, -4.544235, 0.636139, 1.108707, 3.459209, 2.595264, -0.305967, -0.003920, 5.865139, -2.725132, -1.437748, -1.930311, 1.015183, 0.153521, -0.693563, -1.841589, 1.740456, -0.131857, -1.259349, 1.107379, 0.814856, 2.672195, 1.490787, 0.823584, -0.483895, -0.674276, 3.958681, -4.697596, 0.824732, -0.919115, -4.634318, -0.438301, -0.317027, -0.150343, 1.811067, 1.930733, 2.765487, -3.315078, -1.065359, 1.905147, 1.231166, -0.812233, -0.079013, 3.098343, 
1.337479, 0.038435, -1.502123, -0.722991, -0.738154, 1.651970, 3.549967, -2.832844, 2.526853, -4.530358, -1.405437, -0.020393, 1.433086, -0.355914, -1.094193, 0.618523, -0.507370, 1.855727, -0.352494, -1.034884, -0.052144, -1.694263, -1.334652, -0.578301, 1.324574, -1.522585, -0.899184, 3.180330, -2.992112, -0.966538, 1.624621, -2.214644, -0.544830, 0.516665, -2.317800, -0.701623, 0.938447, 0.935423, 7.276221, 2.074192, -0.549290, -0.723959, -1.024809, -2.093009, 0.378594, 1.273304, -1.574900, -2.294054, 1.627634, 2.471876, 0.272425, 0.226814, 0.288442, 0.485079, 0.329602, 0.418512, -1.030336, 0.618441, -0.205079, -0.751919, -0.533747, 0.987899, -0.886300, -1.857596, 0.970968, 0.782056, 1.377219, -3.131898, -2.278232, 0.560626, -5.161348, -0.380426, 0.495337, 0.181925, -0.484390, 2.722185, 1.441381, -3.070866, -1.380223, -2.827929, -0.428279, -2.406868, -1.231688, -1.393025, -2.365047, 1.180624, 4.354129, 0.116944, 1.813338, 0.807496, 5.356595, -0.893292, -2.298326, 0.608894, -2.458173, -3.912065, 1.234876, -0.789027, 0.989614, 1.082788, -0.383455, 0.617175, 3.955452, -2.602732, 0.340088, -2.055584, -1.332299, -2.340340, -2.757793, 1.896885, 1.322887, 0.021037, 2.907897, -0.321884, -2.944226, -1.640788, -1.364418, 2.449870, -4.300892, 4.101160, 1.867850, 0.166516, -0.710645, -1.172884, -1.457727, 2.929085, -0.218713, 2.206084, -0.883459, -1.040819, -2.344896, -1.173216, -0.345644, 1.559119, -1.632681, -2.044267, -2.017350, -0.654807, -0.716796, -0.901010, -1.560858, -2.691737, 2.685868, 1.782925, 3.339304, 0.565915, 0.391605, 3.610030, -3.298944, -0.705625, -1.543735, -1.870102, 1.338298, 1.612513, 0.385103, 1.112341, -0.001102, 0.658675, -0.503109, 1.369964, 2.857534, -0.630183, -0.912625, -1.217065, -0.550542, 2.285744, -2.127434, -3.157195, -2.480932, -0.733192, -1.617258, -1.182549, -1.050172, -0.540184, -1.505584, -1.097327, 1.250695, 0.358068, 1.122536, 0.025612, -0.006341, 1.774774, -2.286098, -2.659644, 3.301547, 0.470509, 1.290470, -1.164262, 0.626534, -3.474944, -1.804067, -1.576454, 3.310174, -0.299973, -1.118997, 0.285524, -0.434010, -0.235016, 2.567713, 2.659858, 0.159478, 0.923787, 0.271707, 0.721747, 1.390933, 1.695482, -35.562675, -0.141364, 3.048325, -1.559244, 0.738925, -1.291286, -2.524163, -0.560224, -2.540498, -0.174527, 7.668949, -0.468271, 2.184141, -0.495266, 4.118879, -0.306242, 1.102313, -4.138959, -3.319368, 1.504961, -0.296103, -0.015277, -0.136177, -2.483171, -0.736024, 2.367176, 2.261982, -0.849145, -0.117148, 0.933736, 1.702317, -0.301388, -0.906758, -1.500765, -3.250679, 3.582183, -0.331119, -0.037043, -1.458076, 1.892284, -0.766386, 0.352996, -0.095548, -2.071965, -0.491389, 2.410537, 2.485983, 0.411316, 1.657690, 2.453979, 0.544419, 1.520648, 0.534818, 5.743890, -1.727980, 1.763634, -1.548289, 4.157279, 2.225596, -0.064131, 1.865851, -2.642000, 0.274984, -0.866992, -0.558188, -6.135097, 1.808120, 1.006938, -0.790857, 0.461167, -0.860196, 2.084737, 5.253679, -0.978117, -0.453410, -0.819495, 0.236075, -2.136300, 0.021472, 0.437208, -2.464820, -0.033580, 0.188308, -3.331381, 3.856862, 3.718570, 1.367965, -0.220747, -2.310231, -1.149265, -1.234862, -1.002847, -1.974621, 1.130993, 4.297782, 0.930710, -1.492884, -0.713347, -0.377052, 2.645738, -0.489495, 4.203647, -0.183929, 2.602280, -0.300018, 0.319702, 0.484708, 1.726325, -2.608667, 0.224787, 0.628384, -0.105292, -0.063091, 3.132477, -1.491148, 0.832812, 2.437400, -1.012723, 0.005830, -1.205948, -3.225458, 3.295642, 0.629336, 1.091727, -3.268245, -4.394265, 1.717201, 0.162753, 2.099977, -0.146153, 0.175116, 
2.906495, -3.290455, -1.285598, 7.942502, 0.160152, 0.179591, 4.543355, 3.116941, 3.622699, 3.588044, 0.600032, 1.724521, -1.439306, 0.903875, 1.313146, -0.755100, 8.836950, -0.952139, -0.333382, 0.589961, -1.261379, -1.519937, 0.594025, 1.764304, 4.243717, -1.108132, 1.925209, -3.976291, 0.235073, 1.686457, -2.469280, -8.696437, -3.778089, 0.477731, 2.229237, -1.451623, -0.649171, -0.769915, 4.691844, -0.204187, -1.182377, 1.737289, 0.001245, 0.825427, 0.922635, 0.588298, 1.344133, -0.989039, 4.205564, -0.115000, -0.715023, 0.977006, -0.833281, -0.086541, 7.658555, 1.852724, -1.467018, -4.778613, -5.723665, 11.013165, -0.535247, 1.304788, -0.278120, -3.020401, 1.007590, -1.811615, -2.275402, 0.979134, -1.302686, -1.351974, -1.836746, 0.542104, 0.011732, 0.467072, 1.436075, -1.912158, -0.329361, 0.906835, 1.310337, -1.657316, -1.454618, -0.057573, 1.054785, 2.734224, 0.864511, -2.898540, 0.721743, 3.449452, 2.660782, -3.281970, 0.793531, -0.939987, 2.136473, 2.435774, -0.931134, 2.375545, -1.911816, 1.508261, 1.438431, -1.554907, 10.636431, 10.181509, 2.162733, 0.092647, -1.967986, -0.263800, -4.868186, -6.291202, 2.514089, -1.563930, 2.339781, 1.775288, 0.459960, -0.443059, 1.163096, 0.124251, -6.209301, -1.731759, 0.933085, -2.717155, -2.114019, 0.764707, -3.598078, -3.171012, -3.224734, -2.070172, -1.446331, 2.061223, -1.382807, 2.091001, -0.697143, -1.502571, -0.759867, 0.295158, 0.411237, -4.074205, -0.500290, -1.297131, 0.634780, -0.869266, 4.393310, 0.298832, -2.684585, 2.340415, -2.036564, 0.622905, -2.296228, 1.926786, 3.597500, -1.945964, 0.066769, 0.744542, 0.415474, -0.575763, 1.847393, -0.139588, -4.165627, 4.270953, 2.814140, 3.188056, -0.450949, 0.978394, -0.130957, -0.081690, 2.020355, 1.264999, 2.465885, -1.305273, -1.484652, 1.913352, 1.274480, 1.071047, 32.553612, -3.937133, 1.768175, 3.947704, -3.099431, 1.153636, 2.373486, 2.360721, -2.020087, 2.675651, 2.172042, 1.795803, 0.559099, -1.527675, 1.322240, 2.126581, 1.912614, -4.063071, 1.399081, 4.218424, 5.575891, -0.873354, -0.366251, 0.625111, -3.856376, 3.276441, -1.155635, -2.581620, -3.619178, -2.831986, 1.644085, 0.133040, 1.446548, -1.878526, 1.286804, 2.888686, -2.470215, -2.362903, -0.971186, 1.676210, 0.187425, 1.839463, -1.896287, -2.081707, 1.814273, 0.938150, 1.786313, -3.683124, 3.041575, 0.135497, -0.442793, -1.932262, 3.042321, -1.016880, 2.041277, -2.040796, -0.421785, -1.340821, 0.105589, 2.341746, 0.292477, 0.475112, -0.495902, -0.475660, -1.491468, -2.629827, 3.888282, 2.804010, 0.150179, -0.908230, -1.783017, 0.495803, -0.610453, 0.289567, -0.078798, -1.560782, -1.805168, -2.729794, 1.172817, 2.256393, -0.442398, 0.522677, -1.631465, 0.356985, 3.064646, 2.438519, -4.161843, -2.152719, -3.548280, -1.114138, -0.859323, -1.193447, 1.367581, 0.220056, 0.163054, 1.273387, -0.971761, -2.162730, 0.737844, -0.055890, -0.216024, 0.862837, 0.151634, -0.601276, 0.358210, 1.970998, -0.229344, -0.017123, -4.781201, -1.324280, -2.443600, 1.611152, 0.149101, 3.477759, -0.751891, -0.717900, -1.417735, 1.407221, -1.185286, 2.538493, 2.014459, 1.288729, 0.121079, -0.775372, 1.095880, 2.870790, 2.828023, -0.678468, 0.029732, 0.468892, 0.173412, -0.554108, 0.868722, -0.347043, 0.670441, -0.115965, -1.428901, -1.543623, 1.486332, -0.581373, 2.728561, 0.982189, -2.209894, 3.659011, -0.832777, -2.435964, -1.310557, -1.055155, -1.834021, -1.562546, -3.038025, 3.090860, -2.067988, -3.092913, 0.118994, -1.485082, 2.321303, -1.523272, -1.430369, -0.029205, -1.739594, 0.019325, 0.569824, -1.613913, 1.451680, 0.781437, 
2.524419, 1.962079, 0.689057, 0.560689, -1.565743, 0.606738, -0.366487, 0.825362, 0.567166, -2.249720, 1.356489, 1.744161, 1.633906, -1.390061, -0.507055, 0.629742, 1.448011, 1.131591, 1.812740, 1.272440, -4.645047, -4.487141, 1.566564, -1.905134, -2.191869, 0.572741, -0.455768, -2.307575, 1.607099, 2.300626, -0.653849, 0.221068, -1.337414, 3.450238, 1.781016, 1.688878, -1.666610, -1.287050, 1.609571, -1.283102, 1.244545, -1.443785, 1.087902, 1.376294, 0.741840, 3.563230, 2.121260, 1.941692, -1.210015, -1.075073, -1.537348, -3.691505, 1.077912, 1.348834, 0.498320, -1.474098, 2.513743, 0.034244, -0.481899, 1.553230, 2.030468, -0.253737, -0.082557, -1.063913, -2.305557, 0.269592, -2.979993, 0.824435, -2.439241, 0.592147, -1.105869, 0.421400, 1.004838, -1.749238, 0.952104, 0.919140, 1.684337, 2.167483, -0.795320, 3.446412, -0.198248, -3.708080, 1.209328, -2.476055, -2.325776, -3.323361, -0.395597, -2.130917, 3.682760, 0.225562, -0.801695, -0.575638, 0.496647, -1.356318, 0.600407, -3.152001, 0.148890, -0.074623, 0.417407, 2.860341, -1.896055, -2.015702, -0.401882, -9.507044, -1.447032, -0.036139, -0.848994, -0.379429, -0.110780, -0.272023, -0.108900, 3.433757, 0.450129, 0.396789, 1.146661, -3.451382, 2.723109, 2.754865, 0.767603, 1.128464, 1.264460, -1.380370, 9.841789, 1.019593, -0.664542, 0.108049, -4.518657, 3.028623, 0.955966, 2.159739, -0.992145, -3.089396, -2.380461, 3.170674, 3.399100, -1.193787, -1.100064, -1.222812, 1.331865, -4.171266, -0.224741, -0.748337, 0.600880, -4.090579, 1.785532, 0.843806, -0.897074, 1.120391, 0.226116, 1.339815, -0.558596, -0.229787, 0.421276, -1.492851, 2.736720, -1.181484, 1.033056, -2.783668, 0.868677, 1.473027, -0.409419, -0.926960, -1.578116, -1.657176, -4.058239, -2.139534, -4.226902, 0.042924, -4.044942, 0.975387, 3.117355, -2.011942, 1.897419, -0.394071, -2.194358, 0.765811, -1.147434, -0.198820, 4.750773, 1.958392, 0.566013, 0.746863, 0.205276, 6.244852, 0.292197, -0.264540, -1.483035, -0.521862, -0.821273, 0.241537, -1.867573, 0.887766, 0.970351, 0.943935, 0.907276, 0.863435, 0.609040, 0.758961, -2.844856, -4.021582, 2.337291, -0.033980, -1.227764, -2.930726, -3.163921, -0.012551, 2.236899, 0.202573, 4.800804, -3.107728, -2.295274, -2.600798, 0.614929, 5.242928, 1.385072, -0.441010, 0.496632, 3.172925, 1.638507, -0.125987, 5.617575, 0.405079, -2.154732, -2.598893, 3.025715, 0.283551, -1.228085, -3.021509, -1.903265, -1.605352, -2.429972, 0.022392, -1.591423, 1.237664, -0.669618, 2.921721, -3.849840, 0.507070, 2.304445, 1.411633, 0.925293, -1.438281, 4.524236, 3.233037, -0.987758, 4.162241, 1.087106, -3.555248, -1.064314, -1.531470, -0.240191, -2.561610, -1.132247, 1.349483, 2.722615, 0.037841, 1.215080, -1.428522, -2.192810, 1.110667, -2.803056, -0.672909, -1.977815, -0.052312, -2.126122, 1.366835, -3.752233, 1.900298, -0.700606, 0.833546, -0.918877, -1.297674, -1.865373, 2.671888, -0.841652, 0.527628, -3.107635, 1.778736, 6.087658, 10.168410, 1.901435, 0.008429, 1.880625, 0.691697, 1.251415, 2.020111, 0.165611, -1.962046, -2.567108, -1.646126, 1.804067, -4.159413, -2.876052, 0.582987, -2.315321, -2.834074, -0.143538, 0.333040, 2.520455, -1.053464, -0.936291, 0.497383, 2.377161, 0.688116, 0.252176, 1.265777, 1.036058, -1.827014, 0.232887, -1.180766, 1.928753, 0.833420, -5.280427, -3.046776, -0.869522, 2.473118, -4.426275, -1.009449, -0.393237, -0.908902, -0.088011, 0.554225, 0.937001, 0.837067, -3.216558, -0.756160, 1.128917, 0.964452, -0.273247, 0.137980, -0.334299, 0.644827, 1.049165, 1.687600, -0.582325, 0.678831, 3.991718, 3.445086, -2.419575, 
-1.215780, 0.836874, -1.800161, -4.176029, 1.932638, 4.352362, -0.640140, -0.239435, -0.297160, 0.884801, -5.469331, -0.966645, -2.994428, -1.457712, -1.884226, -1.562829, -0.063225, -1.810441, 1.137481, 1.578315, 4.733838, -0.413119, -2.760687, -4.963616, 2.259133, -1.462614, -0.125429, 1.826625, 0.243741, -1.940729, 4.980882, -1.852582, -2.769213, 3.123670, -3.025451, 1.775786, 0.248842, -0.728544, -1.077011, -1.391178, 0.466446, -1.327900, 0.010156, -1.119019, 0.406476, -3.023509, -2.991761, -2.242916, -2.700393, -1.149276, -0.574640, -0.623269, 4.592867, 0.085469, 2.845264, 0.500940, 1.761536, -0.758278, 5.217238, 0.938784, -0.517966, 0.385037, -1.285301, -2.375467, -3.277109, 5.389163, -1.802665, -1.425073, 2.843237, -1.299174, -2.772480, 1.326686, -0.461601, 2.181789, -1.470306, 0.363675, -1.825438, -1.595971, -2.633875, -1.263443, 11.719139, 0.751982, -5.437104, -0.402077, -1.769803, -0.488290, -3.136333, -0.206896, -2.774237, -6.601932, 3.922718, -4.739939, 2.086078, -3.384333, -0.272934, -1.534686, -0.467060, 1.184582, -0.470652, 12.171447, -2.855459, -8.415219, 1.164989, -1.923474, -0.980914, -0.601838, 1.957440, 0.195562, 1.815339, 0.264663, 0.158703, -1.772841, 1.136608, -0.290822, 1.927877, 0.562484, 2.267569, 0.140409, 2.506416, -0.439031, 0.860347, -1.896669, 1.397222, 1.579570, 0.485520, 0.957377, -1.102430, 0.442603, -0.920362, -1.657130, 4.251542, 0.022865, 1.276556, 0.728996, -1.456246, 1.469049, 17.869566, -3.002699, -2.320802, -0.407301, 0.102661, 1.159996, 3.357108, -0.603254, -1.308291, -3.801401, 0.142082, 0.596891, 3.836693, 1.812102, -2.562486, 2.357852, 7.259953, 0.651906, 1.206739, 2.925079, 0.821211, -3.675958, 4.616876, -0.251318, 1.483648, -0.104531, 3.231146, 1.392708, 0.213286, -4.196676, -1.593157, -3.684262, -0.936107, -1.851555, 1.740610, 0.759534, -0.725802, -32.275948, 0.821350, -2.229753, -3.098523, 0.335518, -3.705818, -2.480252, -2.382177, 2.335834, 0.559883, -0.078840, 0.265768, 0.672536, -0.617955, -2.180997, 0.050136, 1.086448, 2.409880, -1.411031, 0.431328, -0.727582, 1.202057, 0.676885, 2.333050, 0.667056, 0.704034, -3.499928, 0.377022, -3.475610, -0.461822, -0.347930, 0.258168, 1.602084, 0.519829, -0.155424, 1.457234, 0.126837, 0.867928, 4.091778, 1.680606, 4.067370, -1.098706, 1.745742, -1.184344, 2.251384, -0.088541, 0.205009, 0.219193, 0.667914, 3.437428, -1.572693, 0.404962, -1.385043, 2.847858, -0.488513, -0.725883, 0.752501, -0.153214, 1.029474, 1.962504, 3.080513, 2.361320, -0.633588, -1.436562, 0.260860, 26.884304, 0.354994, 0.023750, 0.750965, 1.353084, 0.426553, -1.632285, 0.965408, 4.016201, 2.177340, 1.253002, -1.670665, 1.720569, -2.180720, 0.322581, 0.279140, 3.985285, -3.724750, -3.344991, 0.349461, 1.836719, -0.266715, -0.634795, 1.627708, -2.453827, 2.661302, -3.227677, 0.053888, -2.480497, -0.681098, -0.049629, 2.152981, -0.581374, 0.205937, -1.006680, -0.501193, -2.547718, -0.441294, -0.789560, 1.551607, 1.530261, 3.002646, -3.386208, 0.647514, -1.249280, 0.674235, -1.530674, -3.838077, 3.722666, 2.963236, 2.088184, 3.554217, -1.200083, 1.763361, -1.686896, -1.194727, -2.250880, 0.875894, -0.186709, -25.303444, -0.505925, -1.991530, -1.867339, 1.825814, -0.766634, -0.213235, 4.148894, -0.046314, 2.148987, -1.871356, 0.335448, -0.485913, -0.093247, -3.264498, 0.528065, 0.522954, -1.395236, -2.970262, -0.581401, -1.861303, 0.173270, 1.324822, -0.018929, 1.717697, 1.011000, 2.334809, 0.878979, -0.626252, 0.987577, 2.981012, 2.250051, 2.358181, 1.915170, -1.237683, -2.741322, 2.350392, -2.240174, -2.580384, -0.634321, 0.317450, 
1.725892, 0.162523, -0.422389, -0.178484, -2.114431, -4.840956, -0.001833, 0.139491, 4.319082, -1.384395, 0.323176, -1.353467, -1.339914, -1.007622, 8.807545, 1.286002, -2.342683, -1.185613, -1.472908, -1.924402, 4.893974, 0.222800, -0.740518, 2.156360, 2.747664, -0.312752, 2.395748, -0.615992, -8.395935, -0.730813, -0.073218, 1.852324, 0.291219, 1.468868, -1.620990, -0.756095, 1.401205, -1.078258, 0.483770, -0.073835, 0.852245, -2.719850, 1.460309, 1.374558, 8.152399, 2.677590, -1.674278, 0.375696, -0.073122, 0.925756, 0.923439, -0.069477, 2.279387, 2.252507, 1.538246, 1.814035, 1.929125, -1.111000, 0.341246, -1.168864, 2.812251, -3.087615, -0.544177, -0.092432, -3.101908, 2.925842, -1.453098, -0.303561, -1.021411, -0.136682, -5.494426, 0.610433, -2.182517, 3.119542, -1.883906, -0.745869, 2.522353, -1.648916, 0.309569, -0.577917, 1.033753, 2.487350, 7.941953, -0.071100, 0.527296, 0.779083, 6.350358, -3.325049, 1.075220, 0.105128, 0.276124, 0.347600, 0.306576, 3.274002, 1.036085, -2.652688, -2.018159, -0.050256, 0.416610, 0.418685, -1.532887, 0.523052, 3.134366, 0.387958, 1.859184, 2.089394, 1.539085, 1.657541, 1.209471, 1.878232, -3.116538, 0.800451, -3.243739, 2.656432, -0.071025, -0.537716, 2.768167, -0.689752, -4.555896, -2.193995, 1.733658, -1.575865, 0.762150, 2.772280, 0.646617, -2.128708, 2.234904, -2.126125, 0.459754, 1.413508, 0.166073, -2.205717, -0.741679, 0.306937, -1.672999, 0.225218, 0.055466, -1.127936, 7.234384, 0.063245, 1.979858, -2.608743, -2.807106, -2.397985, 0.106909, -3.808125, 1.513469, -1.036234, 0.811388, -2.431126, 2.732509, 0.589472, -1.097014, 0.551920, -1.516936, -2.533307, 0.797511, -2.425043, -3.588099, -1.160765, -0.590146, 3.004026, 0.810287, -2.107759, -0.651307, -5.370883, 2.917656, 0.991122, 0.444265, 2.693725, 0.286769, -1.541028, -3.018210, -2.283916, -0.207154, 1.837811, -1.245008, -5.110644, 1.981874, 1.236664, 1.905553, -0.207308, -1.105416, 4.194646, 0.440569, 1.659566, -0.880465, -4.547189, -2.118434, 1.557453, 2.841252, -5.531410, -2.707672, -0.314554, 0.362334, -2.255540, 0.359243, -0.935208, -0.932006, -1.131725, 1.080974, -1.834844, -0.971595, 2.742725, 3.401529, -1.261523, 0.220123, 2.862516, -0.858204, -0.541691, 1.122402, 3.115028, 0.782756, -1.248350, 0.043220, -0.986485, -0.844554, 2.879936, -0.039591, 0.383270, -0.683167, 1.174577, -0.593699, -1.137541, 1.152888, 1.766484, -7.748785, -0.404558, -0.467036, -0.500680, 1.611474, -0.383380, -3.673885, 1.017248, -0.770349, 0.237903, 2.069979, 1.630354, -2.739210, 3.819390, -1.732684, 2.563601, 2.213180, -2.380856, -2.852253, 1.170615, 0.828863, 1.211223, 2.499185, -0.747931, 0.118144, -1.666906, 0.565364, -1.657013, 0.774405, 0.693492, 7.705479, -1.408086, -0.155176, 1.412633, -5.123066, -0.853785, 0.116899, 2.256178, -2.218943, 1.518238, -0.196591, 0.143943, 0.285298, -2.482928, 0.615830, -2.046060, -0.975004, -2.833664, 3.254058, -1.370345, 0.944582, -0.014555, 1.834112, 2.484418, 0.814740, -0.132238, 1.918314, -1.774436, 0.460359, -0.580844, -1.645435, 1.954685, 2.618668, 0.267995, -2.560254, -0.594974, -2.286581, 4.714139, -0.405705, 1.295618, 0.840673, -2.838477, 0.366501, -1.036157, -0.400613, -1.738227, 1.300794, -1.672292, 3.829979, -3.479953, 3.891050, 0.341126, -0.399483, 1.662642, 3.562313, -1.184650, 2.612743, -3.805794, -2.371483, 1.948942, 2.188965, -0.459029, -0.178023, -1.700147, 3.120115, 5.629900, 1.224204, -1.879678, 1.268884, -2.494606, 0.783073, 0.035119, -0.997701, 0.350684, -1.231518, 1.909020, -2.098758, -0.661888, 0.994528, -1.374923, 2.376079, 0.031113, -1.458987, 
0.113488, 0.266556, -2.248442, -0.549227, -1.980329, -0.843419, 0.893711, -1.482391, -0.773147, 1.418742, -0.100246, -2.365190, 1.173016, -0.702880, 0.275237, -1.607930, 0.260333, -2.186061, -0.709117, -1.991098, 3.764254, -3.194555, -1.260547, 1.071692, 3.033343, -0.641039, -0.255529, 3.194899, 1.048945, -0.669115, 4.109255, -2.829791, -0.658942, -0.670331, -3.036335, 1.109055, 0.316420, 9.496139, -1.423056, 4.316561, -3.777977, -0.611273, 4.206706, -3.108094, 2.093183, -0.592171, -2.694616, -2.441919, 0.851335, 0.160457, -2.691463, -0.074577, 0.744847, 1.683688, -0.708158, -1.727233, -1.502183, 0.525043, 0.983311, -0.089715, 0.532941, -0.811899, 0.976230, -2.550968, -0.344250, -0.052567, -0.187679, -0.121404, 2.285113, 1.596615, -1.304425, -2.099068, 1.705707, 0.282685, -1.729276, 1.459553, 0.320318, -4.074892, 1.004075, -3.545393, 1.803405, 0.490817, -4.327343, 1.060507, 2.926682, 1.222938, 3.830746, -0.337875, 1.288773, 0.835695, 1.570575, 3.477370, 0.256512, -1.542086, 0.159785, 0.399716, -0.581507, 2.310691, -0.635375, 0.457800, 2.778989, 4.189120, -1.164997, -2.390188, -1.172381, -2.444343, 0.283850, 2.672297, -0.091714, 0.491060, 3.292467, 0.893021, -0.021946, -4.014757, -2.941429, -3.471601, 0.691645, 0.227577, -0.660446, -0.993292, 1.157482, 0.663260, -0.723520, -2.946966, 0.588723, 0.340043, -0.358396, 1.354849, 1.527313, 0.792265, 0.354935, -3.001401, 2.202762, -0.826216, 1.678419, -0.252368, 2.635601, 3.578719, 3.746924, -0.271551, -2.890261, 2.253434, 0.673195, 1.846275, 0.377794, 1.797091, -0.269892, -2.769761, 0.315330, 3.191075, 1.445889, 1.961443, 0.250564, 1.217402, 1.685299, -0.773906, 3.594047, -0.128995, 0.446818, -2.274753, 1.524932, -5.203915, 1.088118, 0.079942, 3.207416, -18.327145, 1.500175, -0.097854, 2.531832, -1.950670, -0.083165, 2.955576, 0.681629, -0.329289, -3.521765, 2.633065, -0.085364, 5.152068, -0.129515, 2.816390, -3.628945, 0.186241, -1.867233, 1.983360, -0.515092, 1.410643, 0.049975, -0.440827, -0.180698, -0.876025, -1.237767, 2.805179, 1.542505, -0.951790, 0.443897, -2.973520, -3.115988, -3.136214, -1.392136, -4.330337, -2.264652, 0.625757, -4.091477, 5.732088, 0.250295, -0.520872, -0.999017, -5.399399, 1.408189, -1.518100, 0.076899, 0.173335, -0.000062, -0.360869, 1.392707, -0.592256, -0.071057, 1.330815, 1.857339, 1.405391, -1.056357, 3.606451, 0.844912, 0.430513, 0.705153, -0.095922, -3.005373, -6.676332, -1.697640, -2.368549, 0.382992, -0.944774, 1.198584, -1.091452, -1.749086, -3.316520, -2.743624, -0.783803, 0.405243, 0.186438, -2.742365, 2.742394, -0.300057, 1.709888, -0.119355, -1.055460, -0.554546, 1.549400, 3.316050, 1.876061, -0.066178, 0.378583, 2.503380, -0.843418, -2.350612, 1.370083, 4.067267, -0.592636, -1.435324, -0.224314, 3.440698, -3.128674, -1.354647, 0.871077, 0.835582, -2.731527, 3.152105, -13.977840, -3.494694, -1.888668, -0.369791, -2.067304, -1.631454, 1.369503, 1.902894, -0.525999, 0.394126, -0.526112, -1.452425, 0.266547, 0.910412, 2.690451, 0.452772, -0.904265, -0.274242, -1.941391, -0.781456, 1.339111, -0.650924, -2.150021, 0.411827, 0.306410, -1.720717, -0.847376, -0.433327, 8.572508, 5.576869, -1.604961, -3.355456, 0.578090, 3.117163, 0.740736, -1.816481, -1.542356, 1.402068, -3.337177, -3.460914, 0.022969, 0.263521, 2.447577, 2.692053, -2.677152, -1.154691, -0.166149, 2.455483, -1.960949, 0.644558, 0.451422, -2.235790, 0.731520, -0.340948, -1.902708, 0.762279, 0.125709, -1.536236, -0.245376, 5.871339, -1.315382, 1.152163, 0.313443, 2.684904, 1.522253, -2.293488, 1.985134, -1.717681, 1.696683, -2.022664, 2.500051, 
0.610928, 2.378994, 0.165070, 2.061111, -0.533743, 2.667622, 3.499480, 2.485019, 1.861316, -1.150906, 1.217355, -2.907215, 1.377991, 2.626987, 2.557068, -2.540422, -6.635588, 0.997638, 3.295728, 0.550338, 1.183280, -0.518508, 2.825861, 3.199516, -0.683444, 2.162984, 2.370837, 4.094376, -0.561323, 0.784378, 0.182726, 1.457809, -1.172417, 2.387145, 0.324454, 1.190073, 4.428410, 1.672801, 2.330206, 3.335100, -0.557584, -1.865616, 0.313762, 2.122341, 1.508437, 2.887865, 3.216261, 3.003069, 4.676777, 3.199836, -1.936886, -1.224461, -0.051272, -3.227903, 1.606121, 1.955314, 0.546526, 0.661211, -1.968910, 1.787858, 1.048485, -0.461875, -0.509854, -1.147480, 1.632553, 1.572888, 1.340511, 2.246652, -0.744147, 1.609240, 0.752408, -1.323570, 1.025877, 0.814479, -1.259313, 2.315588, -1.214857, 0.447328, 1.474373, 1.504266, -0.497109, 0.577923, -0.198901, 0.571817, 1.572346, -0.229665, -0.482301, 0.700038, 1.471916, 0.754951, -4.222556, -0.885760, 2.337687, 3.346586, 0.457164, 1.315627, 1.394005, -1.703673, 0.883069, 1.166987, -0.816095, 1.200109, -2.375040, 0.826257, 0.166096, -1.786063, -3.522976, 3.031999, -3.488905, -0.379346, -1.583687, 1.950091, -0.198649, -1.979665, 0.650037, -1.176644, 2.323772, -1.853436, -1.898267, 0.950301, 2.705135, -1.034136, 1.889340, -1.965656, -1.262732, -1.637557, 0.371705, 0.813203, -2.314916, -2.003534, -0.387944, -1.389285, 0.283866, 3.240730, -5.791040, 2.754301, -1.986605, 5.498390, -0.273563, 2.022370, -0.106619, 5.943636, 2.984904, 0.613079, -0.669535, -0.343581, -2.248723, 0.348882, 0.099603, 0.015564, -0.590038, -1.586601, 2.235744, -1.424522, -0.791221, 0.587769, -1.771873, 0.902260, -0.221643, -1.720520, 0.883625, 2.038339, 1.595812, -0.467120, 1.634040, 1.504307, -0.748396, -1.020328, -4.903410, 1.616802, 1.099766, 0.745407, -0.021960, -0.519794, 1.157142, -0.792646, -3.190317, 2.819183, -1.442791, -0.101592, 0.509061, -1.473928, -2.708030, 0.791176, 0.473671, -1.907742, 2.050101, 0.003960, -1.289529, -2.341501, -0.373291, -0.925099, -0.040393, -0.479414, -0.349596, 1.877697, -0.776685, 1.993453, 0.372975, 0.333932, -2.029835, -1.045179, 2.716208, 1.865170, 0.747971, 4.798818, 1.579798, 1.555352, -1.155734, 3.145859, 0.887819, -3.669680, 0.728159, -0.804880, 1.060545, 0.031082, -2.118540, 1.153332, 3.899940, -1.163471, 1.295803, -2.362670, -2.000762, 0.001854, 0.792970, -2.817198, 1.936855, -3.864910, 2.709139, 1.039248, 0.088502, 0.231339, 0.591631, 0.030345, 1.299663, 3.123892, -0.961180, 0.043213, 5.733878, -4.098113, 3.308362, 0.510799, -4.140917, 0.843890, -1.273466, -2.071602, 1.445319, -1.818631, 0.162340, 1.681147, 0.362149, 5.492742, 0.825089, 1.722855, -0.476926, 0.432826, -2.043187, -2.016047, -2.091608, 0.597438, 0.042428, 3.973989, 0.883174, -0.129641, 2.237979, 0.567980, 0.839164, 0.976711, -2.264026, 1.649265, 1.344430, 1.434795, 5.406022, 3.077146, 2.661374, 3.106198, -2.270719, 0.887519, -0.145979, 3.447535, 1.555766, -0.138652, 1.195158, 3.220088, 0.879781, -2.362654, -6.360222, 0.395982, -2.533304, 4.291794, 3.155651, 1.251960, -2.407456, -0.218786, -2.583162, -0.899505, -0.780827, -0.983244, -3.478741, 1.778161, -0.658125, 0.597271, 1.897745, 1.777378, -0.100952, 1.271212, -0.681034, 2.460438, 1.110631, 1.252983, -0.114646, 0.479300, 1.725348, 4.230619, 1.187448, -2.427088, -1.236009, 2.918612, -1.148390, -1.583798, -1.435729, -1.783818, -2.767493, -1.541569, 0.203426, -1.242959, 0.281229, -0.392559, 0.520429, -3.234192, -5.539012, -2.151313, -2.462922, 0.635300, 1.190563, -0.151796, -0.622898, -0.042642, -0.083337, -0.730799, 3.834823, 
-0.641445, 1.817954, 3.308098, 2.103870, -1.456245, 1.461707, 1.371928, 0.733169, -2.590485, 0.514485, 3.581903, -1.606874, 0.716330, 1.596098, -0.624766, 0.570248, 0.343349, 0.814959, 2.110700, 3.385014, -0.654017, 0.103144, 1.129105, 0.854295, -1.497854, -0.833896, 0.883463, 2.740201, 1.252455, 0.383430, 4.764862, 0.244701, 0.397635, 2.114073, 3.711519, -1.009765, -2.999290, -0.075239, 1.702090, 4.073386, -2.221128, -0.173297, -1.598970, 1.764099, 1.255920, -3.153074, 0.940827, -0.085776, -0.153299, 3.531004, -2.893436, 2.231733, -2.386345, -1.483328, -3.745522, -5.938141, -1.462745, -1.746019, -2.920442, 0.672212, 0.022831, -2.624349, -2.061772, 0.922139, -2.596588, 2.745512, 1.169222, 0.959605, 3.072976, -0.254427, 5.414840, 0.527497, -1.430076, -1.501413, 2.057433, -1.026962, 5.627439, -2.945267, 0.809274, 2.203258, -0.872946, 4.923897, -1.509921, 0.499868, -1.122034, 0.442121, 4.887639, 2.859808, 0.291162, -0.565278, 0.837086, -5.220106, -0.498070, 1.504637, -0.144360, -0.114923, 0.522100, -2.818848, 1.758005, -0.885172, 1.853490, -2.924307, 2.890154, 1.534062, 1.918770, -0.117491, -0.226110, 0.317553, 0.141368, 1.936240, 2.881628, 0.301003, -2.461584, 0.041076, 0.719569, -0.117806, -2.094253, -2.824019, -1.391719, 1.183342, 1.017699, 0.376079, -0.095749, 0.889862, 1.062274, -0.524244, 2.349395, 1.186792, 2.231821, -1.102010, -1.332299, 2.127689, 0.351990, 1.511401, 0.622317, -3.630643, 0.548977, -3.208701, -2.996888, 2.649668, 0.304146, 1.341268, -2.146057, -1.861894, 4.507286, 0.904826, 1.395878, 0.450848, 0.070200, 3.053755, -2.140768, -2.565916, -0.972909, 1.667059, 1.065236, 0.645459, 0.884246, 0.220186, -3.282768, 1.298033, 4.002408, 0.833334, -3.617452, 0.368626, -1.903755, -0.626035, 1.850133, -3.082982, -2.774525, 1.769177, 0.522702, 0.263647, 1.313907, 3.750313, 2.017340, 0.823528, 2.732456, -3.571347, 2.283923, -0.022426, -0.600996, 1.081142, 0.014277, 2.022802, 0.375120, 1.400345, -2.918789, -1.019804, -1.212387, -4.197097, 0.703760, -1.529950, 2.411883, -0.584985, 1.346535, -0.723685, -2.590233, 3.926456, 2.828357, -1.109949, 1.414086, -0.135390, 0.382299, 2.244257, -0.295509, 4.518786, -3.660137, -2.117182, -2.384901, 3.393075, 0.985744, -0.516841, -0.374062, -4.132094, 2.249375, 2.650640, 0.171410, 3.396479, 0.069148, -0.270228, 0.905647, 3.294580, -3.019566, -3.331932, 2.340150, 2.807488, 2.162702, -3.298753, -1.766656, 1.310992, 0.260271, -0.273308, 4.299369, 3.715046, 1.070362, -1.441203, 0.129578, 0.900721, 5.234731, 2.976517, -0.659467, 0.284069, -0.954808, -0.256831, 0.583463, 1.284158, 1.351618, -0.708611, 2.189399, 0.599912, 1.235909, -1.296227, -0.026225, -0.115726, -1.447470, 1.321840, -0.550714, -0.555223, 1.529182, -1.148362, -1.568398, 3.317645, -0.735568, 0.437838, -0.237062, -1.593969, -1.894876, -1.147040, 6.875043, 11.417304, -1.270446, -0.386883, 0.448277, 0.306192, 0.444976, 0.047800, 0.323642, 0.030898, -0.246496, 1.848722, -0.699640, -2.337017, 3.233449, 1.195664, -1.263332, 2.178815, 0.288599, -1.581660, 3.966269, -0.985900, -0.975200, -0.793356, -2.546527, 0.314768, -1.653216, 0.076431, 0.527626, -4.124629, -0.522641, 2.863382, -0.581339, -0.483502, -1.371282, -2.072694, 2.361188, 1.388546, 0.461689, 0.054946, 2.079017, -0.905662, -1.667842, 1.560647, -2.268772, 2.163057, -1.461485, -9.571809, 2.473650, 27.804079, -0.824428, 1.293797, -3.527821, 0.322644, -0.829934, -1.641960, 1.559845, 0.651142, -2.018788, -0.652320, -0.534693, 2.995028, 0.591905, -0.210980, 1.812501, -2.794134, -0.021838, -1.462607, -4.406371, 0.911137, -1.410916, 1.293619, 
0.665017, -0.952757, -1.092961, 3.622439, 4.556889, -0.322486, 3.173908, 2.919918, 1.061108, 1.607022, 0.961167, 1.944426, -2.335304, -0.616037, 0.400255, 3.586429, 0.725041, 0.298737, -1.890886, -0.024518, 0.091631, -1.731019, -1.398399, 1.815781, 4.581513, 2.576745, 1.146513, -0.740690, 0.073969, 3.108618, 0.535761, -0.192938, 0.454423, 2.053000, -0.904300, 2.072179, 0.649410, -0.662264, -0.673923, -1.733026, 2.902329, -1.831697, 3.206669, -1.994342, 1.821010, -1.573532, -0.257321, 5.394223, -0.162241, -3.188801, 0.847548, -2.007671, 3.947463, 1.168257, 2.246848, 1.873971, -0.494318, 0.724031, 2.609424, -1.787059, -0.378604, -1.377902, -3.019529, -1.382988, 0.023967, -0.011094, 0.253847, -1.189952, 0.510796, 1.386401, -2.568677, 1.666898, 2.934337, -1.311064, 0.238594, 0.870015, -0.393368, 2.161388, 3.384138, -2.131927, -2.342558, 0.287042, 3.170932, -4.181534, 1.297255, -2.535244, 0.296611, 0.657501, 2.486350, 0.579065, 0.901807, -1.826978, 3.008395, 0.015162, -0.812392, -5.237899, -0.064044, -0.666688, 1.184950, 1.343734, -1.661986, 0.091535, 2.635372, -0.006127, 9.887297, 4.918875, 0.266848, -2.460409, 0.854952, -1.334359, 0.044901, -0.075265, 0.200052, -2.027168, 0.649583, -0.283191, -0.853923, -2.430825, 2.559544, -0.007875, -0.466313, 0.781030, -0.850323, -1.138282, 1.897385, -0.009445, -0.401657, -2.866511, -2.329610, -1.208160, -1.773448, 0.369010, 0.188112, -0.367383, -0.806061, -0.871504, -1.204538, 0.251074, 0.966655, -0.356678, 2.718896, 3.092988, -2.580250, -0.622785, -0.762455, 0.026455, -1.173746, -3.874841, 1.271729, 1.204147, -1.713592, -0.083661, -2.208164, 1.115615, 0.098561, -0.295923, -1.642114, 3.539276, -1.431937, -0.611073, -2.333883, -0.713340, -1.436290, 0.124211, 0.463780, -1.330325, -2.417066, -1.452549, -0.621094, -0.331997, -0.248763, -1.276592, -0.835736, -0.789976, 0.210436, 2.124712, -1.141259, 0.878823, 3.689657, -0.996731, -0.788607, -1.662502, 0.702889, 0.455417, 0.196082, -1.905845, 1.417846, 0.184089, -0.428133, -1.289425, 1.620126, -0.999889, 0.915726, 2.355841, 8.382513, -2.161212, 15.398536, 2.307024, 1.641678, -1.617959, 1.106972],
+	"qwen:latest":            [6.398593, -9.147916, 3.167985, -0.016698, 5.146078, 5.260313, -0.588165, 6.041094, -0.448700, 1.538580, 4.409100, 1.965835, -2.027954, 3.322869, -0.209723, -1.591358, -0.332919, 0.490199, -1.801314, -1.394502, 6.208906, 2.582299, 8.829220, -1.215299, -10.441714, 6.180038, 2.357235, -0.251530, -1.217379, 6.260386, -1.005010, 3.707588, 2.900968, -2.015647, 1.153357, 2.200070, 4.747961, -2.433741, 4.030592, -9.292274, -3.870678, 2.177808, -0.545548, -2.237058, 2.649582, -0.528622, 1.250894, -0.112586, -0.969227, -0.571061, -0.680663, 1.271584, -5.168596, 4.699327, 5.181798, 1.421188, 6.487319, 3.149934, 2.146110, 0.893968, 4.691792, -2.652787, 0.780389, -7.469417, -1.913996, -4.114168, -1.824573, 1.825537, -7.670074, 0.083751, 1.497249, -1.842984, -0.207182, -1.217132, 1.720826, -0.462654, -0.142980, 6.752104, 0.513007, -9.219392, -6.861326, -1.046578, -3.621952, 8.216535, -1.929723, -0.226388, -0.364569, 0.592417, 0.661270, -2.502738, 0.655540, -2.301271, -2.658660, 6.579635, -2.761786, 3.214799, 1.964015, 0.085705, 0.268774, 1.773046, 6.180820, -4.607996, -0.740156, 2.677974, -0.014020, -5.367133, -0.792135, 1.014724, -2.928968, -2.636251, -0.764111, -2.006562, 0.120694, -4.609838, 1.088676, -11.941098, 5.737967, -3.500586, 1.158638, 5.277419, -0.824252, 3.719745, 1.067214, 6.999010, -3.490391, 3.380606, -3.941285, 3.037470, -3.074491, -5.476095, 0.703748, 0.590704, 1.712015, 2.907949, -3.779428, 4.336009, -3.208945, 6.012998, -0.633373, -0.858766, 0.509552, -2.374973, 2.670326, -3.898677, 3.515145, -1.504274, -2.528835, -4.196266, 1.497983, -0.251315, 1.430840, -6.129501, -0.432433, -2.067062, 0.683940, -0.651003, 2.075870, 0.301573, 0.456413, 1.512750, -2.011982, -2.651834, -3.443306, -1.217246, 2.249681, 4.056913, -0.001803, 1.856310, -1.120802, 5.100811, -4.818334, -1.832536, 0.575663, 2.821508, -2.484176, 1.090846, 3.982687, -3.890761, 1.816324, -1.156088, -1.480178, -0.946538, -0.579305, -0.901073, -0.626901, 0.268467, -3.557676, -1.586237, 1.791939, -0.206871, -0.901410, 1.202474, 2.744930, -0.571335, 3.200516, 0.411471, -0.096053, 1.948472, 1.908952, 2.884496, 1.760468, -0.835073, 3.515426, 4.381332, -0.223358, -1.809401, 0.086503, 2.059222, -1.261783, -0.836733, 0.856923, -4.898800, 1.888360, -2.207174, 0.376906, 3.295334, 5.689600, -2.268684, 1.449032, -1.591826, 1.197192, -0.958380, -1.880106, 2.903637, -1.158304, 0.286425, 1.411350, -1.044144, 4.642242, 5.629546, -1.222408, -0.653535, -0.909632, 1.304384, 0.949307, 2.316329, 2.287262, -0.217201, -0.789907, -0.414622, -3.416742, 1.308199, -4.681273, 5.369122, -4.293337, 0.764565, -2.343703, -3.794211, 1.932296, 2.057181, -1.281097, -5.943415, 1.662839, 0.180384, -4.695090, 0.702479, -3.153823, -3.411024, 0.180862, -3.886384, -1.007891, -1.704118, 0.862702, -0.511424, 3.460708, -1.867585, -7.966740, 2.017166, 7.204095, -0.777631, -2.125801, 1.848246, 2.023623, 2.431535, -0.120389, -0.336207, 2.110109, -0.700987, 4.741506, 2.285265, 2.249003, -3.817211, 3.017461, 1.867846, 4.302247, 0.304751, -0.310234, -2.985993, 4.513823, -1.711970, 0.424881, 1.872621, 8.673007, -2.507097, 0.834635, 3.987681, 0.865946, -2.122818, 2.411204, 0.166906, 4.962452, 2.623420, -0.873380, 1.751310, 3.454344, -4.624747, 0.288112, -1.057867, -7.632612, 3.383047, 2.529626, 0.388937, 0.680255, 0.010386, -0.285309, 2.821573, 3.907364, 0.626498, 6.285956, 2.837718, -4.136279, 2.172159, -0.740020, 2.603785, 0.488508, -2.008139, 1.194486, 2.840358, 1.642457, 1.124606, -2.682470, 2.333769, 0.111492, 1.035283, 1.694300, 2.123012, 
-2.884374, -7.135779, 4.317023, -0.086166, -1.382437, 0.327938, 0.949700, 6.212725, -1.359681, 0.690782, -1.888311, -3.168814, -4.167558, -6.230211, -4.251054, 1.301581, -2.542017, -27.238344, 0.095754, 4.638476, 3.855103, 0.669284, 0.051903, -0.231133, 5.462236, 1.029191, -1.093251, -2.784316, 2.413422, 4.149455, -6.006116, 1.867116, -1.621314, -2.286732, 1.713983, 1.969203, 2.893601, -1.096513, 2.859676, 5.299241, -1.564409, -4.103854, 3.312290, 7.535967, -10.267817, 0.146586, 0.198330, -0.458738, 5.533854, -0.592334, -1.642502, -0.867962, 0.357863, 1.499991, 0.317374, 1.710249, -3.821599, -4.164293, 0.626525, 2.896627, 4.243000, 2.967313, 1.367663, 6.382295, -6.377450, -4.389627, -0.473153, 2.991648, -1.669134, 1.115779, -0.026947, 0.960267, -5.584852, -1.439730, 0.244562, 4.521901, 2.087858, -0.596442, 1.345291, -1.198195, 1.134727, 7.460937, 3.136116, -1.059861, -3.781231, -3.531691, 1.444958, -3.786484, 4.863374, -4.276469, 5.663068, 0.108844, -4.433540, 3.566478, -0.737263, -0.857777, 3.881980, -2.940099, -0.232970, 1.024321, 0.370978, 1.810182, -1.729423, -0.953740, 0.363640, 0.101128, -0.457497, -5.657916, 2.896876, -2.673734, -0.363334, 2.990057, 1.737989, 1.253162, -2.239347, -0.736923, -1.326416, 1.898398, 0.172377, 3.468918, -1.249191, -2.190415, 4.681655, 4.550338, 8.617194, 1.506140, -4.036048, -5.532407, 3.284861, 0.496790, 0.007859, 1.394254, 0.463913, -0.189794, -3.210449, -5.304472, 1.114539, -1.199341, 0.368409, -3.468653, 0.319924, -7.521216, -0.515546, -0.846355, 3.365871, -7.414912, -1.118039, 0.057726, 0.471262, -0.671277, 5.010963, -1.456864, 0.795316, 5.710810, 5.483665, -3.680223, 3.203779, -7.778675, 1.581216, 0.493372, 1.308644, -0.168083, -1.437484, -4.021763, 3.567986, -1.012697, -1.095128, 4.802341, -1.501106, -3.554114, 1.340577, -2.658166, 0.273735, -2.363988, -3.568919, 2.061281, 1.113543, 0.817031, -1.828942, -3.359740, 5.046678, 4.811258, 1.904614, 1.466768, -1.282211, -2.547651, 4.968533, -4.411788, 4.293465, 0.164253, -1.659536, 0.885092, -0.371292, -1.131706, 5.356121, -4.582285, -6.341745, 2.657330, -1.279451, -3.364145, -4.528135, 6.154132, 0.255915, 0.995843, -6.160520, -4.956539, 0.385687, 1.730230, -4.108953, 1.728342, 1.137076, 0.444596, -3.889635, -4.341253, -2.400633, 0.832284, -3.842432, -2.179685, 1.179994, -3.570244, -3.793373, -0.992903, -6.269731, -2.026199, -2.740402, 3.908176, -0.131522, -2.611959, 5.417125, 4.601712, -0.944981, 1.923583, 3.380219, 0.557484, -4.402332, -1.821094, -1.393727, -4.922822, -0.122208, 1.627198, -2.698848, 0.543942, 7.662592, 6.865628, -3.118904, -2.938929, -1.204334, 0.880950, -2.432030, 0.659579, -5.937887, -3.082295, -2.329674, -0.929039, -6.670197, 4.624016, 1.847857, -6.151865, -1.295520, 4.561952, -4.189340, -1.271205, -3.114521, -5.400505, 2.505269, -1.440362, -0.543130, 1.583606, -3.754081, 1.425768, 4.600868, 2.031716, 4.381232, 0.756865, 1.269595, 2.452235, -3.499653, 0.843460, 5.095802, -3.224932, -1.973590, -0.918793, -2.948713, -1.726048, 0.723157, -1.867589, -3.809328, -1.288017, -1.415320, 9.147604, 4.031230, -2.515405, 6.017965, -0.808787, 6.938612, -0.940846, 0.514639, 4.288442, 0.824478, 0.728683, 1.763265, 0.545236, 1.356232, 5.527868, 4.475016, -3.437302, 3.826543, 5.086852, 0.299217, 4.161529, -0.210087, -2.249442, -2.974098, -6.259721, -1.801933, -1.427602, 3.724947, 3.003747, 0.540840, 0.380978, -0.046774, -0.128234, -6.908992, 0.317432, -0.887537, 3.312163, -0.027360, 2.139082, 2.805890, -0.598460, 2.746891, 3.195262, -0.988383, -0.554773, -1.027135, 0.100964, -3.613074, 5.187125, 
0.852667, -0.236328, 6.884120, 2.533068, 6.731904, -0.778514, 0.375824, -1.817388, -5.946012, -2.352830, 5.102455, -1.938184, -1.387404, -4.757504, -2.173933, -0.201531, -3.642277, -5.362231, 3.637059, 2.751554, -1.022320, 0.236827, -3.948135, 0.148865, -1.120026, 7.616945, -0.963430, -0.756691, -0.919667, 4.724895, 0.976052, 2.488966, -3.964747, -3.973845, -0.865214, 6.001307, -2.119917, 4.599735, -0.121487, 4.304985, -1.779739, -1.172909, -0.084037, -4.013283, 2.303892, 3.205657, 3.503476, 1.411669, 1.280174, 8.759250, 0.880316, -2.128790, -4.705835, -3.441471, 1.247740, 1.331217, -2.363749, -0.643617, 1.385127, -5.501627, -7.879881, 5.820022, -1.216314, -2.178036, -1.108843, 3.346485, -3.363919, 3.609971, 2.944907, -1.553591, 0.286604, -1.090472, 3.108707, -0.752903, 3.523269, 4.811034, 5.104235, -0.589819, -1.991645, 0.047370, 0.897678, 1.814540, -2.876569, 0.355313, -0.919469, 0.483877, 4.114913, -4.182666, -1.667883, -0.516528, 3.272009, 1.569783, 1.219359, -2.658862, 1.495385, -6.089864, -2.128447, 2.838350, 0.987129, 1.348928, -2.442407, 3.521696, -0.835038, -0.587476, -0.638110, 0.922019, 2.216001, 1.777817, 1.454185, 4.067125, 1.120436, 0.289011, -10.992048, 1.672040, -0.465922, 0.592983, 0.989681, 1.752660, -0.118764, -1.185995, 4.017984, -0.896778, 1.672302, -2.030242, -4.911530, -5.827189, 1.958246, -7.221211, -2.034618, 1.940285, -1.227662, 1.219494, 4.101542, -2.650769, -2.734393, -6.333424, 2.812163, 2.962675, -4.904971, 0.171724, 1.579389, 3.322502, -0.514203, 1.357068, 5.346297, 0.754811, 2.778355, 1.658731, 1.977090, -1.235332, 5.064486, 1.987611, 6.833618, 1.541649, 2.747869, -2.104572, -1.799155, 0.295171, -4.860865, -0.765238, -1.785131, 2.577568, 0.511845, 0.550628, -2.406874, -2.713742, -2.492991, -0.175184, -0.126996, -1.339363, 0.823035, 2.378861, -2.361350, 1.065711, 2.798916, -0.116275, -5.790211, 4.252931, -0.497029, 1.009890, -1.029344, 1.774927, -6.555468, 2.879664, 0.601462, -1.747875, 1.505472, 1.982710, 2.030839, -2.763254, -1.348869, -0.266905, 4.583949, 3.799467, 3.299871, -6.750926, -5.246260, 0.655915, 5.968640, 0.782474, -1.063692, 1.318094, 3.220077, -3.783729, -0.677141, 0.595987, -2.890465, -1.360495, -0.124686, -0.284035, -2.925553, 0.262872, 2.786888, 4.160244, 0.014939, 0.569116, 1.181584, -2.572151, -1.554686, 1.284756, -4.822996, 0.997169, -3.813555, 0.617060, 2.565912, 1.037117, -0.167606, 7.223352, -2.058073, -3.086306, 4.202200, 0.180718, 3.078883, -1.246702, -2.439780, -1.529070, 2.548267, 3.340835, -6.063392, -1.775246, -0.201369, 1.530203, -0.590155, -4.917980, -2.405702, -1.290209, -0.190081, 4.806739, 4.704314, 3.928058, 2.274630, -0.550331, 0.822520, 1.818900, 1.580151, -3.903281, -0.857364, -3.976201, -0.785557, 1.069647, -5.818671, 2.766473, 1.259937, 2.320919, -1.525085, 2.384599, 4.469339, 0.205931, -0.547983, 2.354457, -0.327334, -6.448855, -0.907820, 5.117530, -0.255594, -0.240747, -1.073850, -1.768409, -5.428776, 0.481095, -3.812433, -1.717587, -3.733663, 2.083388, 10.582418, -4.710001, 0.563532, -7.536781, -6.452072, 2.862794, -1.463039, 0.230586, -1.815489, 1.481611, 1.544786, 1.551767, -0.059472, 0.789418, -0.825706, 1.940825, 5.768491, -0.494084, -2.538201, -1.392608, -1.908880, -1.700553, 5.082959, 10.069242, 0.678830, -0.559667, 2.036356, 1.434546, -2.558431, 2.288113, 0.170179, 2.484860, -2.987775, -0.382073, -0.226642, -2.446935, -3.155464, -3.223151, 8.751858, 0.507641, -2.614630, 1.343406, -0.892121, -2.539207, -4.111343, 1.182877, -4.679571, -2.524526, -5.096138, -1.301956, 2.281354, -2.029088, 0.427091, 3.804364, 
5.483494, -40.825272, -1.143677, -2.260099, -0.089059, -1.493274, -1.944442, -2.385689, 1.698596, -1.873040, -0.443924, 2.621414, 1.287797, -0.057905, -2.352337, 1.238259, 1.387846, 1.417577, 0.688409, 4.645106, 2.341688, 0.465816, -7.549288, -5.974134, 6.261028, -5.144201, -2.522077, -0.662837, 4.664794, -0.539391, -3.439147, 4.087629, -1.963182, 2.466938, 2.107603, -1.249598, -6.342202, -3.447833, -3.704789, -1.074862, 0.765216, -1.964713, -5.504405, 1.663442, -2.911618, -0.616493, -2.083917, 2.209042, 0.487668, 1.796368, -1.596250, 4.042785, 2.681681, 2.703725, -1.306239, 2.601149, -2.099759, -5.318709, -1.065740, 0.589329, -3.567276, 0.062211, -0.425451, -3.580077, -1.296509, -5.680976, -0.678171, -0.762381, 0.299684, 0.240645, -1.079323, -2.957309, 1.849129, 0.099766, 4.726409, -2.363460, -4.875949, 1.315865, 1.462351, -4.889824, 1.677129, -0.667212, -5.138101, 5.330096, -2.835877, -3.902706, 3.045518, -0.134772, 6.075926, 4.500954, 5.112908, -0.905090, -1.909402, 0.904969, 0.690467, -0.619620, 2.666605, 0.904155, -4.029288, 0.402307, -3.036969, -1.037772, -1.147319, -1.899678, 6.248024, 1.974826, 5.616739, 8.500142, 2.245967, -3.625181, -2.428448, -1.810710, 0.331268, -3.135995, 1.302269, 1.225240, 3.909755, -0.403967, -3.433037, -1.311207, -2.177593, -18.017111, 1.118036, -0.756810, -6.615274, 1.566789, -2.418086, 2.953141, -2.826252, -0.735017, 1.528646, -3.723853, -1.885919, -4.900423, -2.478693, -3.619526, -1.294044, 1.890493, -1.871122, 3.827642, 2.343641, -2.662672, 3.404760, -0.482313, 5.578103, -2.134901, 5.369441, 2.492493, 2.104894, -5.205935, -1.008856, 3.011516, -0.641482, 2.548511, -8.027479, 0.661283, -7.434981, 1.621635, 2.033537, -1.930799, -1.076036, -0.960546, -0.117069, -3.372064, -2.063029, 1.654161, 1.716305, 4.944497, -0.019142, -4.051437, -3.766835, 5.842964, -3.690862, -3.004082, -4.533800, -1.790580, -2.050976, 0.561251, 1.564617, -2.369565, 2.445515, 0.568967, 0.080670, 1.160943, -1.780555, -0.186662, 0.478309, -4.549850, -17.848110, -4.087846, -0.273795, 5.222282, 5.777647, 0.494441, -1.833347, 0.257310, 1.124015, -1.849150, -1.647193, -1.132576, -2.164145, 4.289577, 0.453893, 1.341683, 0.748200, -2.471226, 5.312476, 0.836187, 1.723641, 0.498636, 1.428005, 1.828248, -3.398380, -2.883598, -5.426562, 2.745332, 1.425643, 0.006252, -3.581231, -0.262443, -0.622419, -1.944646, -3.869738, 4.476512, -1.191560, 4.310348, -1.112125, 3.697780, -1.439761, -4.078893, 2.179358, 2.530555, -0.148791, 2.168117, 0.114731, 1.831150, 1.157180, 2.166352, 2.055449, -2.871636, 4.420805, -1.390404, -6.722301, -1.769241, 1.519223, -2.692869, 1.015409, 7.691347, 0.583279, -4.058763, -1.453730, 2.806040, 0.556832, 2.730164, -2.572670, 2.370497, 1.912015, 3.515903, 1.919521, 0.369309, -2.022592, -0.218010, -6.205793, -5.902825, 4.063861, -2.432916, 33.217953, -0.551205, 3.915136, -3.102407, 3.210474, -1.160345, -2.887495, 1.523478, -1.018772, -2.422345, 1.810487, -5.200218, 1.671701, -0.721633, 4.423970, -1.442638, -5.779491, -0.397599, -3.750891, 2.698724, -5.718669, -2.086298, -7.792905, 3.388355, -3.770853, -0.215906, 2.560900, -2.680165, -2.986103, -6.097780, 4.229813, -2.723863, -3.065816, 2.248178, -0.651940, -1.214029, 2.257283, 2.486542, 2.257068, 4.830973, -2.277972, -1.946075, 3.999813, 3.456074, 0.536131, 1.805786, 0.709500, -4.097561, -5.412663, 4.285580, -4.488030, 1.119286, 2.302969, 5.947579, -0.113177, -1.724484, -0.104090, 1.840069, 4.592496, 1.569632, -2.483522, -0.311809, -0.194584, -2.869655, -2.849751, 4.997175, 2.512164, 1.758786, 0.490910, 4.247337, 0.569464, 
-0.659693, -0.454139, 4.440271, 1.410414, -0.773389, -2.376852, -6.240281, -4.227293, -1.671386, 0.603003, 2.479946, -2.180943, 3.127448, 1.668763, 0.149739, -0.781542, 7.098460, 2.111330, -4.436161, -2.082523, -2.478790, 49.008598, 1.723189, 5.866392, 3.153973, -0.181730, -7.328019, -1.992281, -2.027254, 2.432771, 0.979790, -0.092340, 1.070737, 0.099132, 6.923749, -4.058023, -8.293395, -1.186165, -3.612353, 0.829185, -0.439513, -1.879160, -1.854795, -5.092451, -1.970279, -4.055690, -7.000295, -4.616829, -0.039388, 4.131409, 1.653316, -7.131061, -5.091387, 3.990695, 3.182390, -1.955781, 0.335848, 1.516590, -0.282827, 2.731199, -1.598010, 5.144428, 2.064570, 2.552294, 1.812606, -2.513990, 2.353807, 7.561320, -0.036158, -0.385612, -4.383836, -3.971509, 0.043287, 4.296186, 0.464179, -1.848343, -5.514888, -8.349360, -8.976414, 0.569201, -4.740505, -1.152213, 5.905676, -5.700530, 3.814966, 2.234695, -0.222299, 4.771212, -3.930703, 2.960777, 0.165897, -1.333587, 2.028787, -0.778738, -1.235554, -1.246747, -0.041303, 3.006766, 3.820721, -5.493714, -0.189577, -3.010185, -2.883236, 0.024274, -1.494312, -2.523447, -4.104508, -2.666597, 3.281853, 3.544635, 3.610449, -0.587052, -0.098227, 0.073750, 0.127269, -0.344085, 0.770601, -2.514192, -4.431854, -1.949836, -1.391094, -2.886564, 0.702709, -0.731393, -2.069504, 1.040801, 3.144605, -0.115405, 2.797428, -1.210172, 3.943987, 2.697510, 1.059327, 0.742219, 4.559521, 1.323614, -0.211233, -0.215014, 1.990037, 1.956536, -2.729569, -3.183045, 2.447714, 2.988651, 5.736022, -3.185564, -2.579111, 3.323712, 4.193132, 0.727872, 5.551432, -0.744345, -2.344970, -3.441572, 5.568137, -4.830486, 3.880134, -5.891489, -2.210011, 0.171487, -1.457955, -2.245640, 2.203776, 0.875658, 1.959566, 8.602451, 4.474270, 2.726350, -0.354296, -2.258420, -2.513044, 4.931829, 2.946697, 0.520752, 4.333879, -0.821970, 2.029501, -2.277242, -4.952377, -1.538025, -1.064874, 3.308603, 1.247338, 2.504045, -3.224561, -2.891461, 3.316285, 7.715738, -1.605185, 3.243022, -5.836881, 2.714357, -0.388968, -1.375407, -3.556258, -2.474644, 1.088091, -3.216538, -2.776881, -8.227687, 5.125639, 5.283110, 6.973928, 37.955891, 3.139960, -5.763666, 1.350248, -1.406692, -3.986167, 2.335608, 3.157670, 4.455172, 2.308789, 2.309107, -3.130976, -4.247937, -2.270962, 1.574682, 5.501863, 4.485770, -10.403132, 0.773148, -1.224575, 5.244150, -3.847297, -1.800287, 6.947053, -2.622383, 4.851179, 3.233861, 4.995902, -4.162736, -1.983544, 3.995517, 6.687618, -3.040680, -6.543100, 5.237826, -4.066863, -1.226237, 1.639323, -4.240799, -0.423647, -2.178972, 1.593127, -3.968011, 5.461793, 2.277310, -2.744098, -3.358952, 4.035235, -3.137929, 2.964127, 7.214087, -1.364202, -4.651634, 6.503850, 2.740077, 0.886651, 2.558752, 0.043657, -5.242559, -0.792984, -2.882527, -5.403186, 3.155617, 3.991355, -0.568835, 1.141021, -3.702596, -0.279778, -0.912082, -3.422060, -1.879990, -0.733728, 2.278115, -2.372427, -2.681229, -1.556143, -0.770521, 0.899023, 4.508717, -3.010523, -1.908418, -1.475849, 3.509949, 3.582316, -0.681324, -7.056856, 1.278887, 0.770643, -0.584454, 2.825036, -0.821392, 1.240007, 7.577941, 0.096160, -3.572768, 1.793741, 3.848424, 0.995781, 0.054073, -0.235984, -0.301966, 2.916689, 1.391444, 1.480205, -3.852633, -1.223310, -3.153660, 3.194536, 1.030388, -4.751256, 1.885638, -2.114385, -6.170693, 1.415366, 4.334911, -4.597432, -0.846934, -3.220494, 7.086485, 3.530614, -2.059356, 1.625898, -2.086513, -1.870273, 3.097336, -2.899379, 4.410741, -2.872575, 0.711264, 5.131933, 1.355852, -2.781525, 3.417987, -2.958153, 
-0.075279, 1.298355, -0.051176, 2.940977, 0.224740, 4.669080, -3.098835, -1.002103, -2.227010, -0.529945, 1.581029, -1.380316, -0.444712, 1.033249, -2.976829, 1.184253, -3.129241, -4.516237, 1.861109, 0.749785, -0.883805, 2.467990, -1.366948, -2.021918, 9.678317, 0.780995, 5.006135, 2.355187, 4.709684, 0.273654, 0.848736, -1.546677, 0.190088, -1.301066, -1.275526, -0.592527, 2.147533, 4.496069, 2.915365, -2.232816, -3.967589, 2.198257, 3.257962, -1.514701, -1.426605, -0.670297, 0.013288, -1.436101, -3.523503, 1.392464, -7.692491, 4.621131, 4.215765, -6.433372, 8.560532, -5.166466, 1.641891, 0.939057, -1.161749, -0.620703, 0.268485, 0.818933, -1.119831, -2.502043, 1.727721, -2.059391, 2.365984, -3.621562, 0.293849, 2.106696, 2.435274, -0.056334, 1.744176, -0.872306, -3.566818, -3.476950, -13.363617, -2.873412, -0.464867, 0.363577, 2.328689, -3.345537, -8.831505, -0.320157, 0.871519, -4.472153, -0.401718, -0.096133, 0.386455, -1.806300, 1.170147, 1.817253, -3.103240, -0.165836, 3.598591, 0.253634, 2.874893, 3.903717, 1.369801, 5.628721, 2.572816, 2.252870, -1.170226, -1.062752, -0.456143, -3.078095, -6.341308, 4.122602, 2.681516, 0.093924, 3.844733, 3.950950, -3.356711, -0.854817, 3.384428, 13.341409, -4.234371, 2.496033, -1.629354, -1.507377, 0.173900, 2.698258, 0.267310, 1.372167, 0.510639, 2.101537, 1.261789, 2.597848, 2.449376, -0.840756, -1.046823, -0.002541, -2.707355, 0.755944, 1.257627, 1.465595, 0.194379, 7.094273, 0.264183, 5.946443, -6.767539, 3.796284, -2.374620, -2.461099, -1.391973, -3.764230, -2.892979, 1.555705, -1.332523, -1.799763, -2.815821, -4.461638, 0.016501, -0.694294, -0.483337, 1.215167, 0.758542, 4.294414, -2.490471, -2.232953, 0.058542, -0.601252, 4.924240, 3.834749, 2.027948, 1.471411, 0.901828, 0.966709, 0.390534, 7.367773, 1.259584, -0.460943, -6.608699, -4.643585, 2.040176, -4.100492, -1.690598, 3.704851, -2.805748, 1.226204, 6.179536, -1.176640, 3.004384, 3.111051, 3.486775, -4.034345, 4.785528, 7.299822, -0.907930, -5.061491, -3.685342, -1.961922, -3.682397, -0.804982, -1.361447, -0.922343, -4.764962, -0.093592, 6.292956, 9.184029, -1.652289, -0.349144, 2.227105, 2.586640, -1.344222, -0.799372, -1.067394, 1.337438, -4.368445, -3.637234, 0.453892, -2.348681, 3.619864, 0.782791, -0.751199, -2.257583, -0.637428, -2.538015, -4.910626, 0.804029, 1.904546, 2.071048, -4.913878, 7.321102, 2.329751, -4.918932, 9.630699, 4.401609, 4.836075, -37.672218, -0.989470, -3.400167, -1.611549, -4.914345, -2.178263, -0.507991, 7.461368, 1.499078, 9.014656, 0.385251, -5.354575, -0.235111, 1.188299, 2.999619, 4.158133, -2.458529, 0.836758, -3.781934, 2.581318, -0.546479, -0.767487, -1.108148, 1.394199, -6.728708, -0.049908, 2.439957, 2.281411, -0.256135, 2.625314, 0.526615, 5.245579, -0.693234, -4.652315, -6.053971, 0.761856, 1.380219, 2.186297, 1.574326, 3.023049, -3.008269, -5.497355, 1.208180, 1.866580, 0.383009, -5.021918, -1.859053, -0.577350, 0.791435, 1.862842, -2.977753, -2.982435, -3.968694, -2.911249, -0.671418, -0.664998, -4.328650, 0.777280, -3.322049, -1.996393, -10.946344, 1.487512, 3.325927, 2.512698, -4.560525, 2.487353, -3.221335, 2.498028, 1.558956, -7.027573, -4.410831, 1.153316, -2.383291, 2.752918, -2.445125, 1.669130, 2.055993, 3.435565, -2.200916, 7.625603, -0.429790, -7.823750, 0.715926, 1.946298, 0.931786, -0.959709, -2.558171, 1.349378, 2.164468, -1.039176, -0.840470, -1.273818, -1.788402, -0.181308, 4.731328, -1.308624, -4.392232, -3.329735, 3.710122, -0.471847, 0.321802, -2.903600, 2.763454, 1.606396, 5.018901, -4.307723, -5.778127, 1.896904, -1.911034, 
-0.877533, -0.970224, -6.048330, -1.055960, -0.004360, 3.615595, 0.670895, -3.405262, -6.979682, 1.391201, -2.248485, -0.812938, -3.361127, -0.245261, -2.004265, 2.180632, -2.492358, 0.106926, -2.312480, -0.849407, -3.182122, -3.868700, 1.483516, -2.800287, 1.606942, 5.240799, -4.588236, 3.825581, 1.475691, 5.782595, 0.572563, 0.615392, -3.263512, 2.357608, 2.504305, -0.131105, -4.494020, -3.431349, 1.323660, -1.233401, 5.227014, 1.833779, 4.529938, 0.172702, 2.495649, 1.844747, -0.571699, 6.221216, 3.510139, 2.034399, -2.708287, 2.504865, 0.827564, -0.389984, 0.973059, -1.210372, 3.136084, -0.528568, -2.895905, -2.945923, -3.354907, -6.284786, 2.405454, -3.611116, -1.510917, -0.275045, -2.606582, 0.023821, -0.977702, 4.496226, 3.893765, -5.102979, 4.505174, -2.151086, -2.598680, 3.437677, -1.525504, -3.606384, 2.925199, -1.819598, -1.297506, -3.938237, 0.758268, -6.536557, 2.823950, -1.268384, -4.748070, -1.569543, -4.040869, -4.199319, -1.537972, -2.470041, -2.424089, 4.282330, 2.027144, -2.279008, 1.956247, 0.243131, -1.307445, 7.631332, 3.666424, 3.244223, 0.806356, 1.559336, -6.454810, -2.210939, 6.478909, 8.370586, -4.877525, 1.632561, 0.528654, 7.175992, 3.294758, -2.347389, -0.656673, 2.440072, 0.033162, -3.974950, 3.992282, 1.041370, -0.452447, 1.250809, 5.725498, 0.952154, -1.769804, -6.182436, -1.491944, 0.978641, -1.848162, -3.526021, 2.130781, -1.247035, 0.430292, 2.094477, 1.015652, -0.982822, -3.514047, 1.362636, -1.385784, 7.721636, 0.452686, -0.514129, -0.185515, -4.155198, -2.091428, -0.342165, 3.609978, 2.325354, -2.549308, -1.935902, -4.843817, 4.849944, 0.559908, 0.317363, -6.979578, 4.078461, -1.781615, -0.168588, -3.608682, 0.540197, -3.224210, -2.160827, -2.483818, -1.395126, 2.122100, 0.045637, -5.244937, 1.460025, -1.313215, -1.831590, -3.608512, 2.137432, 1.063356, -4.977770, -4.077838, -0.227993, -2.521503, -2.196034, 2.424290, 3.695994, -0.664042, 3.369231, -0.868323, -1.362390, 2.049519, 1.623333, -3.785452, -0.366721, 0.929944, 1.189942, 2.357296, -2.012867, 4.039164, 5.018045, 1.173269, 1.037018, 1.239052, -1.506088, 1.858684, -8.143816, -8.377998, -4.266102, 2.238616, 0.990164, -5.564924, 4.017815, 0.013090, -2.655519, -2.138789, 0.309147, 0.815738, -3.671095, -1.768397, 0.642272, -4.239707, -1.738047, -3.186784, 0.928931, -3.288824, 2.778164, 2.127247, 1.010160, -0.380883, -1.794696, -0.380974, 2.568671, 2.952252, -0.378777, 3.388682, 1.112046, 3.843087, -4.014625, -2.586132, -1.373591, 0.875761, -2.153665, -4.098034, 4.174786, -3.971316, 3.831267, 2.632764, -2.082423, 2.323014, 0.126692, -1.658272, 3.813766, 0.731241, 5.020086, -1.314192, -0.455286, 2.529340, 1.737364, -2.307101, 3.249065, 3.359480, 2.342399, 2.964352, 0.450493, 6.821503, -0.876081, 0.104224, -4.359997, 6.337316, 1.961707, 1.584909, 5.782849, -7.702838, 0.486187, -2.076824, 0.556629, -0.377218, 1.867574, -2.546370, 3.420962, 3.743823, 0.830919, -3.154605, 2.442055, -0.077978, -2.174074, 7.835155, 0.042525, 5.061534, -1.297285, 0.468504, -0.896670, -2.810953, -2.835888, -4.904580, 1.410659, 6.832096, -1.246026, -2.643165, 2.461378, 1.465698, -2.603248, 3.614096, 0.604979, -0.203376, 0.620558, 2.010493, -3.164634, 2.355133, 0.910522, -9.153761, -6.733269, 0.191007, -2.222224, -3.317362, 1.621990, -3.271222, -3.361245, -2.236486, -3.204127, -0.102103, -6.418847, 0.475259, 0.960290, 0.809332, 2.043769, 0.233913, 1.443959, 3.518958, 0.381731, 3.579408, -1.736145, 0.935460, 0.490841, -1.445789, -2.606658, -0.390318, 1.241003, 1.852338, 3.745817, -3.404975, -2.927765, -1.375834, 4.025413, 
-2.450278, 0.526100, 0.871327, -8.299149, -3.752483, 8.046568, -3.801634, -2.328504, -2.873852, 12.037412, 5.806139, 0.151948, 1.294600, 0.820967, 1.635987, 1.551868, -16.845692, -0.875519, -2.038996, 4.211426, -4.056794, 5.402084, -3.281292, 7.430756, 1.138992, -3.348103, -0.064485, 2.009815, 3.773692, 3.913712, 4.532695, 1.372892, -1.063987, 1.025051, 2.728509, 9.491262, 4.673415, 2.517928, 0.824100, 1.517722, -2.984458, 0.270283, -1.110453, -2.754004, -3.045094, 1.386551, -6.637009, -6.467078, 5.209693, 0.221588, 1.345757, -1.233974, 1.406563, -11.624565, -2.558307, 3.355196, -1.574055, 11.278919, -4.175966, -3.528599, -5.955521, 5.521868, -6.089912, 1.871637, 0.704803, -0.803571, -3.412199, 6.814842, -2.581755, 1.461685, -1.340520, 1.030475, 0.972731, -2.055774, 1.705598, 3.048807, -1.037460, 2.906647, -0.434317, -0.054622, -3.738979, -4.256308, -4.723948, -0.522000, 4.060871, -4.338176, -24.310600, 3.466682, -0.756065, -4.103305, 1.243985, -2.868507, 0.153364, -0.084377, 3.205434, -1.317540, -0.319762, -0.802727, 2.456871, 3.382993, 4.963143, -0.354739, -2.737841, 2.374343, -1.887784, 1.198460, -7.294523, -2.117028, 46.393829, 2.054618, -1.339482, 2.647988, 1.026573, -0.775336, -1.399183, -6.095242, 2.313640, -2.377140, -2.874966, 1.462420, 1.763429, -4.175523, 0.322729, 0.076063, -0.492210, 5.528355, 2.494064, 3.575900, -4.731384, 1.914505, 0.780838, -0.513604, 0.834317, 4.637135, 0.748915, -2.242534, 5.284515, 0.941967, -4.771373, 12.168940, -2.509539, 2.931109, 6.130014, 2.304723, -0.401287, 1.846620, 2.191286, 0.522786, -2.212207, -2.330939, 3.671052, -3.602032, -8.045081, 1.841934, -15.802244, -4.091495, 2.758163, 3.801088, -0.116542, 4.200107, 3.809458, 2.594901, 8.304158, 2.272768, -3.162868, -4.738754, -1.238095, -2.981887, -0.739677, -8.554259, -1.512607, -3.465696, -0.641592, -5.622955, 0.712421, -0.786531, -3.055792, -1.708393, -1.229299, -2.440192, -2.387127, -1.278995, 0.990231, 0.675825, 7.402359, 2.521222, 0.413226, -1.471801, -0.305300, -2.968605, -0.631904, 2.190862, 1.293332, 0.988720, 1.396958, 1.053343, 0.096911, 5.328904]
+}
diff --git a/integration/utils_test.go b/integration/utils_test.go
new file mode 100644
index 0000000..19f4d1b
--- /dev/null
+++ b/integration/utils_test.go
@@ -0,0 +1,372 @@
+//go:build integration
+
+package integration
+
+import (
+	"bytes"
+	"context"
+	"errors"
+	"fmt"
+	"io"
+	"log/slog"
+	"math/rand"
+	"net"
+	"net/http"
+	"net/url"
+	"os"
+	"path/filepath"
+	"runtime"
+	"strconv"
+	"strings"
+	"sync"
+	"testing"
+	"time"
+
+	"github.com/ollama/ollama/api"
+	"github.com/ollama/ollama/app/lifecycle"
+	"github.com/ollama/ollama/format"
+	"github.com/stretchr/testify/require"
+)
+
+const (
+	smol = "llama3.2:1b"
+)
+
+func Init() {
+	lifecycle.InitLogging()
+}
+
+func FindPort() string {
+	port := 0
+	if a, err := net.ResolveTCPAddr("tcp", "localhost:0"); err == nil {
+		var l *net.TCPListener
+		if l, err = net.ListenTCP("tcp", a); err == nil {
+			port = l.Addr().(*net.TCPAddr).Port
+			l.Close()
+		}
+	}
+	if port == 0 {
+		port = rand.Intn(65535-49152) + 49152 // get a random port in the ephemeral range
+	}
+	return strconv.Itoa(port)
+}
+
+func GetTestEndpoint() (*api.Client, string) {
+	defaultPort := "11434"
+	ollamaHost := os.Getenv("OLLAMA_HOST")
+
+	scheme, hostport, ok := strings.Cut(ollamaHost, "://")
+	if !ok {
+		scheme, hostport = "http", ollamaHost
+	}
+
+	// trim trailing slashes
+	hostport = strings.TrimRight(hostport, "/")
+
+	host, port, err := net.SplitHostPort(hostport)
+	if err != nil {
+		host, port = "127.0.0.1", defaultPort
+		if ip := net.ParseIP(strings.Trim(hostport, "[]")); ip != nil {
+			host = ip.String()
+		} else if hostport != "" {
+			host = hostport
+		}
+	}
+
+	if os.Getenv("OLLAMA_TEST_EXISTING") == "" && port == defaultPort {
+		port = FindPort()
+	}
+
+	slog.Info("server connection", "host", host, "port", port)
+
+	return api.NewClient(
+		&url.URL{
+			Scheme: scheme,
+			Host:   net.JoinHostPort(host, port),
+		},
+		http.DefaultClient), fmt.Sprintf("%s:%s", host, port)
+}
+
+var serverMutex sync.Mutex
+var serverReady bool
+
+func startServer(t *testing.T, ctx context.Context, ollamaHost string) error {
+	// Make sure the server has been built
+	CLIName, err := filepath.Abs("../ollama")
+	if err != nil {
+		return err
+	}
+
+	if runtime.GOOS == "windows" {
+		CLIName += ".exe"
+	}
+	_, err = os.Stat(CLIName)
+	if err != nil {
+		return fmt.Errorf("CLI missing, did you forget to build first?  %w", err)
+	}
+	serverMutex.Lock()
+	defer serverMutex.Unlock()
+	if serverReady {
+		return nil
+	}
+
+	if tmp := os.Getenv("OLLAMA_HOST"); tmp != ollamaHost {
+		slog.Info("setting env", "OLLAMA_HOST", ollamaHost)
+		t.Setenv("OLLAMA_HOST", ollamaHost)
+	}
+
+	slog.Info("starting server", "url", ollamaHost)
+	done, err := lifecycle.SpawnServer(ctx, "../ollama")
+	if err != nil {
+		return fmt.Errorf("failed to start server: %w", err)
+	}
+
+	go func() {
+		<-ctx.Done()
+		serverMutex.Lock()
+		defer serverMutex.Unlock()
+		exitCode := <-done
+		if exitCode > 0 {
+			slog.Warn("server failure", "exit", exitCode)
+		}
+		serverReady = false
+	}()
+
+	// TODO wait only long enough for the server to be responsive...
+	time.Sleep(500 * time.Millisecond)
+
+	serverReady = true
+	return nil
+}
+
+func PullIfMissing(ctx context.Context, client *api.Client, modelName string) error {
+	slog.Info("checking status of model", "model", modelName)
+	showReq := &api.ShowRequest{Name: modelName}
+
+	showCtx, cancel := context.WithDeadlineCause(
+		ctx,
+		time.Now().Add(20*time.Second),
+		fmt.Errorf("show for existing model %s took too long", modelName),
+	)
+	defer cancel()
+	_, err := client.Show(showCtx, showReq)
+	var statusError api.StatusError
+	switch {
+	case errors.As(err, &statusError) && statusError.StatusCode == http.StatusNotFound:
+		break
+	case err != nil:
+		return err
+	default:
+		slog.Info("model already present", "model", modelName)
+		return nil
+	}
+	slog.Info("model missing", "model", modelName)
+
+	stallDuration := 60 * time.Second // This includes checksum verification, which can take a while on larger models and slower systems
+	stallTimer := time.NewTimer(stallDuration)
+	fn := func(resp api.ProgressResponse) error {
+		// fmt.Print(".")
+		if !stallTimer.Reset(stallDuration) {
+			return errors.New("stall was detected, aborting status reporting")
+		}
+		return nil
+	}
+
+	stream := true
+	pullReq := &api.PullRequest{Name: modelName, Stream: &stream}
+
+	var pullError error
+
+	done := make(chan int)
+	go func() {
+		pullError = client.Pull(ctx, pullReq, fn)
+		done <- 0
+	}()
+
+	select {
+	case <-stallTimer.C:
+		return errors.New("download stalled")
+	case <-done:
+		return pullError
+	}
+}
+
+var serverProcMutex sync.Mutex
+
+// InitServerConnection returns a Client, the test endpoint, and a cleanup
+// function; it fails the test on errors and starts the server if needed.
+func InitServerConnection(ctx context.Context, t *testing.T) (*api.Client, string, func()) {
+	client, testEndpoint := GetTestEndpoint()
+	if os.Getenv("OLLAMA_TEST_EXISTING") == "" {
+		serverProcMutex.Lock()
+		fp, err := os.CreateTemp("", "ollama-server-*.log")
+		if err != nil {
+			t.Fatalf("failed to generate log file: %s", err)
+		}
+		lifecycle.ServerLogFile = fp.Name()
+		fp.Close()
+		require.NoError(t, startServer(t, ctx, testEndpoint))
+	}
+
+	return client, testEndpoint, func() {
+		if os.Getenv("OLLAMA_TEST_EXISTING") == "" {
+			defer serverProcMutex.Unlock()
+			if t.Failed() {
+				fp, err := os.Open(lifecycle.ServerLogFile)
+				if err != nil {
+					slog.Error("failed to open server log", "logfile", lifecycle.ServerLogFile, "error", err)
+					return
+				}
+				defer fp.Close()
+				data, err := io.ReadAll(fp)
+				if err != nil {
+					slog.Error("failed to read server log", "logfile", lifecycle.ServerLogFile, "error", err)
+					return
+				}
+				slog.Warn("SERVER LOG FOLLOWS")
+				os.Stderr.Write(data)
+				slog.Warn("END OF SERVER")
+			}
+			err := os.Remove(lifecycle.ServerLogFile)
+			if err != nil && !os.IsNotExist(err) {
+				slog.Warn("failed to cleanup", "logfile", lifecycle.ServerLogFile, "error", err)
+			}
+		}
+	}
+}
+
+func GenerateTestHelper(ctx context.Context, t *testing.T, genReq api.GenerateRequest, anyResp []string) {
+	client, _, cleanup := InitServerConnection(ctx, t)
+	defer cleanup()
+	require.NoError(t, PullIfMissing(ctx, client, genReq.Model))
+	DoGenerate(ctx, t, client, genReq, anyResp, 30*time.Second, 10*time.Second)
+}
+
+func DoGenerate(ctx context.Context, t *testing.T, client *api.Client, genReq api.GenerateRequest, anyResp []string, initialTimeout, streamTimeout time.Duration) {
+	stallTimer := time.NewTimer(initialTimeout)
+	var buf bytes.Buffer
+	fn := func(response api.GenerateResponse) error {
+		// fmt.Print(".")
+		buf.Write([]byte(response.Response))
+		if !stallTimer.Reset(streamTimeout) {
+			return errors.New("stall was detected while streaming response, aborting")
+		}
+		return nil
+	}
+
+	stream := true
+	genReq.Stream = &stream
+	done := make(chan int)
+	var genErr error
+	go func() {
+		genErr = client.Generate(ctx, &genReq, fn)
+		done <- 0
+	}()
+
+	select {
+	case <-stallTimer.C:
+		if buf.Len() == 0 {
+			t.Errorf("generate never started.  Timed out after :%s", initialTimeout.String())
+		} else {
+			t.Errorf("generate stalled.  Response so far:%s", buf.String())
+		}
+	case <-done:
+		require.NoError(t, genErr, "failed with model %s, prompt %q", genReq.Model, genReq.Prompt)
+		// Verify the response contains the expected data
+		response := buf.String()
+		atLeastOne := false
+		for _, resp := range anyResp {
+			if strings.Contains(strings.ToLower(response), resp) {
+				atLeastOne = true
+				break
+			}
+		}
+		require.True(t, atLeastOne, "%s: none of %v found in %s", genReq.Model, anyResp, response)
+		slog.Info("test pass", "model", genReq.Model, "prompt", genReq.Prompt, "contains", anyResp, "response", response)
+	case <-ctx.Done():
+		t.Error("outer test context done while waiting for generate")
+	}
+}
+
+// GenerateRequests returns a set of generate requests and, for each request,
+// the substrings of which at least one is expected in the response.
+// By default each request uses the smol model (llama3.2:1b).
+func GenerateRequests() ([]api.GenerateRequest, [][]string) {
+	return []api.GenerateRequest{
+			{
+				Model:     smol,
+				Prompt:    "why is the ocean blue?",
+				Stream:    &stream,
+				KeepAlive: &api.Duration{Duration: 10 * time.Second},
+				Options: map[string]any{
+					"seed":        42,
+					"temperature": 0.0,
+				},
+			}, {
+				Model:     smol,
+				Prompt:    "why is the color of dirt brown?",
+				Stream:    &stream,
+				KeepAlive: &api.Duration{Duration: 10 * time.Second},
+				Options: map[string]any{
+					"seed":        42,
+					"temperature": 0.0,
+				},
+			}, {
+				Model:     smol,
+				Prompt:    "what is the origin of the us thanksgiving holiday?",
+				Stream:    &stream,
+				KeepAlive: &api.Duration{Duration: 10 * time.Second},
+				Options: map[string]any{
+					"seed":        42,
+					"temperature": 0.0,
+				},
+			}, {
+				Model:     smol,
+				Prompt:    "what is the origin of independence day?",
+				Stream:    &stream,
+				KeepAlive: &api.Duration{Duration: 10 * time.Second},
+				Options: map[string]any{
+					"seed":        42,
+					"temperature": 0.0,
+				},
+			}, {
+				Model:     smol,
+				Prompt:    "what is the composition of air?",
+				Stream:    &stream,
+				KeepAlive: &api.Duration{Duration: 10 * time.Second},
+				Options: map[string]any{
+					"seed":        42,
+					"temperature": 0.0,
+				},
+			},
+		},
+		[][]string{
+			{"sunlight"},
+			{"soil", "organic", "earth", "black", "tan"},
+			{"england", "english", "massachusetts", "pilgrims", "british"},
+			{"fourth", "july", "declaration", "independence"},
+			{"nitrogen", "oxygen", "carbon", "dioxide"},
+		}
+}
+
+func skipUnderMinVRAM(t *testing.T, gb uint64) {
+	// TODO use info API in the future
+	if s := os.Getenv("OLLAMA_MAX_VRAM"); s != "" {
+		maxVram, err := strconv.ParseUint(s, 10, 64)
+		require.NoError(t, err)
+		// Don't hammer on small VRAM cards...
+		if maxVram < gb*format.GibiByte {
+			t.Skip("skipping with small VRAM to avoid timeouts")
+		}
+	}
+}
+
+func getTimeouts(t *testing.T) (soft time.Duration, hard time.Duration) {
+	deadline, hasDeadline := t.Deadline()
+	if !hasDeadline {
+		return 8 * time.Minute, 10 * time.Minute
+	} else if deadline.Compare(time.Now().Add(2*time.Minute)) <= 0 {
+		t.Skip("too little time")
+		return time.Duration(0), time.Duration(0)
+	}
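+	// Leave a 2 minute buffer for the soft limit and a 20 second buffer for the
+	// hard limit before the test binary's own deadline
+	// (-time.Since(x) is equivalent to time.Until(x)).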
+	return -time.Since(deadline.Add(-2 * time.Minute)), -time.Since(deadline.Add(-20 * time.Second))
+}
diff --git a/kvcache/cache.go b/kvcache/cache.go
new file mode 100644
index 0000000..405c797
--- /dev/null
+++ b/kvcache/cache.go
@@ -0,0 +1,77 @@
+package kvcache
+
+import (
+	"errors"
+
+	"github.com/ollama/ollama/ml"
+	"github.com/ollama/ollama/model/input"
+)
+
+var (
+	ErrKvCacheFull  = errors.New("could not find a kv cache slot")
+	ErrNotSupported = errors.New("model does not support operation")
+)
+
+type Cache interface {
+	// ** used by model implementations **
+
+	// SetLayer sets the active layer of the cache
+	SetLayer(layer int)
+
+	// Get returns the history of key and value tensors plus a mask
+	//
+	// The shape of the tensors is documented in the specific
+	// cache implementation used.
+	Get(ctx ml.Context) (ml.Tensor, ml.Tensor, ml.Tensor)
+
+	// Put stores a batch of key and value in the cache
+	//
+	// The shape of the tensors is documented in the specific
+	// cache implementation used.
+	Put(ctx ml.Context, key, value ml.Tensor)
+
+	// SetConfig controls optimizations (mostly backend-specific) that may transform
+	// the output of the cache to work better with specific kernels. If not called,
+	// the backend settings will be used. This works well when calling Attention.
+	//
+	// The config can be overridden by models, especially if they require vanilla
+	// output when implementing their own version of attention. To do this, pass
+	// an empty ml.CacheConfig.
+	//
+	// Most models will not need to use this.
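+	//
+	// For example, a model that needs unmodified cache output can pass an
+	// empty config:
+	//
+	//	c.SetConfig(ml.CacheConfig{})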
+	SetConfig(ml.CacheConfig)
+
+	// ** cache management **
+
+	// Init sets up runtime parameters.
+	// backend: Used to allocate cache data storage and execute management operations (such as defrag)
+	// dtype: The data type for storing cache entries
+	// maxSequences: The maximum number of sequences stored in the cache - across all batches
+	// capacity: The number of cache entries to store, per sequence
+	// maxBatch: The maximum number of tokens that can occur in a single batch
+	Init(backend ml.Backend, dtype ml.DType, maxSequences, capacity, maxBatch int)
+
+	// Close closes the cache and frees resources associated with it
+	Close()
+
+	// StartForward is called before the start of the model's forward pass.
+	// For each token in the coming batch, there must be a corresponding
+	// entry in positions and seqs. reserve is to preallocate memory
+	// without actually storing data in the cache.
+	StartForward(ctx ml.Context, batch input.Batch, reserve bool) error
+
+	// CopyPrefix copies tokens in the range [0, len) from srcSeq to dstSeq
+	CopyPrefix(srcSeq, dstSeq int, len int32)
+
+	// CanResume returns true if the cache can continue with the next token at
+	// the given position and sequence. Assumes that the caller has already
+	// verified the contents of the cache.
+	CanResume(seq int, pos int32) bool
+
+	// Remove deletes tokens in the range [beginIndex, endIndex) from seq. Set
+	// endIndex to math.MaxInt32 to remove everything starting at beginIndex.
+	//
+	// If an error occurs, the entire context for the sequence should be
+	// removed by calling Remove(seq, 0, math.MaxInt32)
+	Remove(seq int, beginIndex, endIndex int32) error
+}
diff --git a/kvcache/causal.go b/kvcache/causal.go
new file mode 100644
index 0000000..9bc1d5d
--- /dev/null
+++ b/kvcache/causal.go
@@ -0,0 +1,739 @@
+package kvcache
+
+import (
+	"errors"
+	"fmt"
+	"log/slog"
+	"math"
+	"slices"
+
+	"github.com/ollama/ollama/ml"
+	"github.com/ollama/ollama/model/input"
+)
+
+type shiftFn func(ctx ml.Context, layer int, key, shift ml.Tensor) (ml.Tensor, error)
+
+// Causal cache stores K and V tensors according to their position in the
+// sequence. Returns the history and a mask for attending to past tokens
+//
+// The tensors are of shape embed dim, kv heads, batch size
+// The mask is of shape history size, batch size
+type Causal struct {
+	DType      ml.DType
+	windowSize int32
+	chunkSize  int32
+
+	opts CausalOptions
+
+	// config controls mostly backend-specific optimizations
+	config *ml.CacheConfig
+
+	// ** current forward pass **
+
+	// the active layer for Get and Put
+	curLayer int
+
+	// starting location for data storage for this batch
+	curLoc int
+
+	// size of the current batch
+	curBatchSize int
+
+	// mask of the cache as used by this batch
+	curMask ml.Tensor
+
+	// locations in the cache that are needed for this batch
+	curCellRange cellRange
+
+	// curSequences is the sequences corresponding to this pass's entries in the cache
+	curSequences []int
+
+	// curPositions is the positions corresponding to this pass's entries in the cache
+	curPositions []int32
+
+	// ** cache metadata **
+
+	// for each possible location in the cache, stores the position and set of sequences
+	// that reference the data there
+	cells []cacheCell
+
+	// maps from sequence to the range of locations where it is stored in the cache
+	cellRanges map[int]cellRange
+
+	// ** cache data storage **
+
+	shiftFn      shiftFn
+	backend      ml.Backend
+	ctxs         map[int]ml.Context
+	keys, values map[int]ml.Tensor
+}
+
+type cacheCell struct {
+	pos       int32
+	sequences []int
+}
+
+type cellRange struct {
+	min int
+	max int
+}
+
+func NewCausalCache(shift shiftFn) *Causal {
+	return &Causal{
+		windowSize: math.MaxInt32,
+		shiftFn:    shift,
+		ctxs:       make(map[int]ml.Context),
+		keys:       make(map[int]ml.Tensor),
+		values:     make(map[int]ml.Tensor),
+	}
+}
+
+func NewSWACache(windowSize int32, shift shiftFn) *Causal {
+	return &Causal{
+		windowSize: windowSize,
+		shiftFn:    shift,
+		ctxs:       make(map[int]ml.Context),
+		keys:       make(map[int]ml.Tensor),
+		values:     make(map[int]ml.Tensor),
+	}
+}
+
+func NewChunkedAttentionCache(chunkSize int32, shift shiftFn) *Causal {
+	return &Causal{
+		windowSize: math.MaxInt32,
+		chunkSize:  chunkSize,
+		shiftFn:    shift,
+		ctxs:       make(map[int]ml.Context),
+		keys:       make(map[int]ml.Tensor),
+		values:     make(map[int]ml.Tensor),
+	}
+}
+
+func (c *Causal) Init(backend ml.Backend, dtype ml.DType, maxSequences, capacity, maxBatch int) {
+	if c.config == nil {
+		var config ml.CacheConfig
+		if cc, ok := backend.(ml.BackendCacheConfig); ok {
+			config = cc.CacheConfig()
+		}
+		c.config = &config
+	}
+
+	if c.config.CachePadding == 0 {
+		c.config.CachePadding = 1
+	}
+
+	if c.config.MaskBatchPadding == 0 {
+		c.config.MaskBatchPadding = 1
+	}
+
+	if c.config.MaskDType == ml.DTypeOther {
+		c.config.MaskDType = ml.DTypeF32
+	}
+
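+	// With full causal attention the cache must hold up to capacity entries
+	// per sequence. With sliding window attention only windowSize entries per
+	// sequence need to be retained, plus room for one batch in flight.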
+	var cacheSize int
+	if c.windowSize == math.MaxInt32 || capacity < int(c.windowSize) {
+		cacheSize = maxSequences * capacity
+	} else {
+		cacheSize = (maxSequences * int(c.windowSize)) + maxBatch
+	}
+	cacheSize = roundUp(cacheSize, c.config.CachePadding)
+	c.cells = make([]cacheCell, cacheSize)
+
+	c.DType = dtype
+	c.cellRanges = make(map[int]cellRange)
+	c.backend = backend
+}
+
+func (c *Causal) SetConfig(config ml.CacheConfig) {
+	if c.config != nil {
+		panic("config cannot be changed after being previously set, either by the model or backend")
+	}
+
+	c.config = &config
+}
+
+func (c *Causal) Close() {
+	for _, ctx := range c.ctxs {
+		ctx.Close()
+	}
+}
+
+func (c *Causal) StartForward(ctx ml.Context, batch input.Batch, reserve bool) error {
+	c.curBatchSize = len(batch.Positions)
+	c.curSequences = batch.Sequences
+	c.curPositions = batch.Positions
+	c.opts.Except = nil
+
+	if !reserve {
+		c.updateSlidingWindow()
+
+		var err error
+		c.curLoc, err = c.findStartLoc()
+		if errors.Is(err, ErrKvCacheFull) {
+			c.defrag()
+			c.curLoc, err = c.findStartLoc()
+		}
+		if err != nil {
+			return err
+		}
+
+		c.curCellRange = newRange()
+		for i, pos := range batch.Positions {
+			seq := batch.Sequences[i]
+
+			c.cells[c.curLoc+i] = cacheCell{pos: pos, sequences: []int{seq}}
+
+			seqRange, ok := c.cellRanges[seq]
+			if !ok {
+				seqRange = newRange()
+			}
+
+			if c.curLoc+i > seqRange.max {
+				seqRange.max = c.curLoc + i
+			}
+			if seqRange.max > c.curCellRange.max {
+				c.curCellRange.max = seqRange.max
+			}
+
+			if c.curLoc+i < seqRange.min {
+				seqRange.min = c.curLoc + i
+			}
+			if seqRange.min < c.curCellRange.min {
+				c.curCellRange.min = seqRange.min
+			}
+			c.cellRanges[seq] = seqRange
+		}
+	} else {
+		// If we are reserving memory, don't update any of the cache metadata but set the size
+		// to the worst case.
+		c.curLoc = 0
+		c.curCellRange.min = 0
+		c.curCellRange.max = len(c.cells) - 1
+	}
+
+	var err error
+	c.curMask, err = c.buildMask(ctx)
+
+	return err
+}
+
+func newRange() cellRange {
+	return cellRange{
+		min: math.MaxInt,
+		max: 0,
+	}
+}
+
+// Find the first contiguous block of at least curBatchSize
+func (c *Causal) findStartLoc() (int, error) {
+	var start, count int
+	for i := range c.cells {
+		if len(c.cells[i].sequences) == 0 {
+			count++
+			if count >= c.curBatchSize {
+				return start, nil
+			}
+		} else {
+			start = i + 1
+			count = 0
+		}
+	}
+
+	return 0, fmt.Errorf("%w (cache: %v batch: %v)", ErrKvCacheFull, len(c.cells), c.curBatchSize)
+}
+
+func (c *Causal) updateSlidingWindow() {
+	if c.windowSize == math.MaxInt32 {
+		return
+	}
+
+	// create a map of unique sequences to the lowest position in that sequence
+	lowestPos := make(map[int]int32)
+	for i := range c.curPositions {
+		seq := c.curSequences[i]
+
+		pos, ok := lowestPos[seq]
+		if !ok {
+			pos = c.curPositions[i]
+		} else if c.curPositions[i] < pos {
+			pos = c.curPositions[i]
+		}
+
+		lowestPos[seq] = pos
+	}
+
+	// delete any entries that are beyond the window of the oldest position in the sequence
+	for seq, pos := range lowestPos {
+		oldRange, ok := c.cellRanges[seq]
+		if !ok {
+			continue
+		}
+
+		newRange := newRange()
+
+		for i := oldRange.min; i <= oldRange.max; i++ {
+			if slices.Contains(c.cells[i].sequences, seq) {
+				if c.cells[i].pos < pos-c.windowSize {
+					c.cells[i].sequences = slices.DeleteFunc(c.cells[i].sequences, func(s int) bool { return s == seq })
+				} else {
+					newRange.min = min(newRange.min, i)
+					newRange.max = max(newRange.max, i)
+				}
+			}
+		}
+
+		c.cellRanges[seq] = newRange
+	}
+}
+
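+// roundDown and roundUp align length to a multiple of pad; for example,
+// roundDown(10, 4) == 8 and roundUp(10, 4) == 12. A pad of 1 leaves the
+// value unchanged.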
+func roundDown(length, pad int) int {
+	return (length / pad) * pad
+}
+
+func roundUp(length, pad int) int {
+	return ((length + pad - 1) / pad) * pad
+}
+
+// buildMask builds a mask of shape history x batch indicating, for each token
+// in the batch, whether each token in the history may be attended to. This is
+// based on both the sequence and causality (the position of the history entry
+// must not be ahead of the token in the batch).
+func (c *Causal) buildMask(ctx ml.Context) (ml.Tensor, error) {
+	// Align and pad the two dimensions as required by the backend
+	batchSize := roundUp(c.curBatchSize, c.config.MaskBatchPadding)
+
+	c.curCellRange.min = roundDown(c.curCellRange.min, c.config.CachePadding)
+	c.curCellRange.max = roundUp(c.curCellRange.max+1, c.config.CachePadding) - 1
+
+	length := c.curCellRange.max - c.curCellRange.min + 1
+	mask := make([]float32, batchSize*length)
+
+	for i := range c.curBatchSize {
+		enabled := !slices.Contains(c.opts.Except, i)
+		for j := c.curCellRange.min; j <= c.curCellRange.max; j++ {
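+			// A history entry is masked out if it belongs to a different
+			// sequence, is in the future relative to this token (causal),
+			// falls outside the token's chunk (chunked attention), or has
+			// slid out of the window (SWA).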
+			if !slices.Contains(c.cells[j].sequences, c.curSequences[i]) ||
+				(enabled && c.cells[j].pos > c.curPositions[i]) ||
+				c.chunkSize > 0 && c.cells[j].pos < c.curPositions[i]-c.curPositions[i]%c.chunkSize ||
+				c.cells[j].pos < c.curPositions[i]-c.windowSize {
+				mask[i*length+(j-c.curCellRange.min)] = float32(math.Inf(-1))
+			}
+		}
+	}
+
+	// Mask out any padding tokens we added. For padding that we added to the cache history, this
+	// has already been masked out because the sequence doesn't match.
+	for i := c.curBatchSize * length; i < len(mask); i++ {
+		mask[i] = float32(math.Inf(-1))
+	}
+
+	maskTensor, err := ctx.Input().FromFloatSlice(mask, length, batchSize)
+	if err != nil {
+		return nil, err
+	}
+
+	if c.config.MaskDType != ml.DTypeF32 {
+		out := ctx.Input().Empty(c.config.MaskDType, maskTensor.Shape()...)
+		ctx.Forward(maskTensor.Copy(ctx, out))
+		maskTensor = out
+	}
+
+	return maskTensor, nil
+}
+
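+// moveCells copies length cache entries from src to dst for every layer,
+// using views over the underlying key and value tensors.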
+func (c *Causal) moveCells(ctx ml.Context, src, dst, length int) {
+	for i, key := range c.keys {
+		if key == nil {
+			continue
+		}
+
+		kHeadDim := key.Dim(0)
+		numKVHeads := key.Dim(1)
+		rowSize := key.Stride(2)
+
+		kSrcView := key.View(ctx, rowSize*src, kHeadDim*numKVHeads*length)
+		kDstView := key.View(ctx, rowSize*dst, kHeadDim*numKVHeads*length)
+
+		value := c.values[i]
+		var vSrcView, vDstView ml.Tensor
+		if c.config.PermutedV {
+			vHeadDim := value.Dim(1)
+			elemSize := value.Stride(0)
+
+			vSrcView = value.View(ctx, elemSize*src, length, len(c.cells)*elemSize, vHeadDim*numKVHeads)
+			vDstView = value.View(ctx, elemSize*dst, length, len(c.cells)*elemSize, vHeadDim*numKVHeads)
+		} else {
+			vHeadDim := value.Dim(0)
+			rowSize := value.Stride(2)
+
+			vSrcView = value.View(ctx, rowSize*src, vHeadDim*numKVHeads*length)
+			vDstView = value.View(ctx, rowSize*dst, vHeadDim*numKVHeads*length)
+		}
+
+		ctx.Forward(
+			kSrcView.Copy(ctx, kDstView),
+			vSrcView.Copy(ctx, vDstView),
+		)
+	}
+}
+
+func (c *Causal) defrag() {
+	slog.Debug("defragmenting kv cache")
+
+	// Defrag strategy:
+	// - Search for empty holes at the beginning of the cache,
+	//   filling them with active data starting at the end
+	// - If there are contiguous elements that need to be moved,
+	//   combine them into a single operation by holding new moves
+	//   until we see that the next one is non-contiguous
+	// - Fill up the context with the maximum number of operations it
+	//   can hold then compute that and continue with a new context
+	//
+	// We could try to optimize placement by grouping blocks from
+	// the same sequences together but most likely the next forward
+	// pass will disrupt this anyway, so the real-world benefit
+	// seems limited at this time.
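+	//
+	// For example, with cells [A _ _ B C] (holes marked _), the occupied
+	// entries at the end are moved into the leading holes so that all live
+	// entries form one contiguous block at the front of the cache.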
+
+	ctx := c.backend.NewContext()
+
+	// For every move, 6 tensors are required per layer (2 views and a
+	// copy for each of k and v). We also need to refer to the original
+	// k and v cache tensors - once per layer, not per move.
+	layers := 0
+	for _, key := range c.keys {
+		if key == nil {
+			continue
+		}
+		layers++
+	}
+
+	maxMoves := (ctx.MaxGraphNodes() - 2*layers) / (6 * layers)
+	moves := 0
+
+	var pendingSrc, pendingDst, pendingLen int
+	src := len(c.cells) - 1
+
+	for dst := 0; dst < src; dst++ {
+		if len(c.cells[dst].sequences) == 0 {
+			for ; src > dst; src-- {
+				if len(c.cells[src].sequences) != 0 {
+					c.cells[dst] = c.cells[src]
+					c.cells[src] = cacheCell{}
+
+					if pendingLen > 0 {
+						if src == pendingSrc-pendingLen && dst == pendingDst+pendingLen {
+							pendingSrc = src
+							pendingLen++
+							break
+						} else {
+							c.moveCells(ctx, pendingSrc, pendingDst, pendingLen)
+							moves++
+						}
+					}
+
+					pendingSrc = src
+					pendingDst = dst
+					pendingLen = 1
+
+					break
+				}
+			}
+		}
+
+		if moves >= maxMoves {
+			ctx.Compute()
+			ctx.Close()
+			ctx = c.backend.NewContext()
+
+			moves = 0
+		}
+	}
+
+	if pendingLen > 0 {
+		c.moveCells(ctx, pendingSrc, pendingDst, pendingLen)
+		moves++
+	}
+
+	if moves > 0 {
+		ctx.Compute()
+	}
+	ctx.Close()
+
+	// Reset range metadata
+	for seq := range c.cellRanges {
+		seqRange := newRange()
+
+		for i, cell := range c.cells {
+			if slices.Contains(cell.sequences, seq) {
+				if i < seqRange.min {
+					seqRange.min = i
+				}
+				if i > seqRange.max {
+					seqRange.max = i
+				}
+			}
+		}
+
+		c.cellRanges[seq] = seqRange
+	}
+}
+
+func (c *Causal) SetLayer(layer int) {
+	c.curLayer = layer
+}
+
+type CausalOptions struct {
+	// Except lists the batch indices for which the causal mask is not applied
+	Except []int
+}
+
+// SetCausal disables causal mask generation for the given indices in the current
+// batch for subsequent calls to Get. The state resets for the next forward pass.
+func (c *Causal) SetCausal(ctx ml.Context, opts CausalOptions) {
+	if !slices.Equal(c.opts.Except, opts.Except) {
+		c.opts = opts
+		if ctx != nil {
+			var err error
+			c.curMask, err = c.buildMask(ctx)
+			if err != nil {
+				// This error should never occur because we have previously built a mask with the same shape
+				panic(fmt.Errorf("SetCausal: %w", err))
+			}
+		}
+	}
+}
+
+func (c *Causal) Get(ctx ml.Context) (ml.Tensor, ml.Tensor, ml.Tensor) {
+	key := c.keys[c.curLayer]
+	value := c.values[c.curLayer]
+
+	kHeadDim := key.Dim(0)
+	numKVHeads := key.Dim(1)
+	rowSize := key.Stride(2)
+	cachedSize := c.curMask.Dim(0)
+
+	key = key.View(ctx, rowSize*c.curCellRange.min,
+		kHeadDim, key.Stride(1),
+		numKVHeads, key.Stride(2),
+		cachedSize,
+	)
+
+	if c.config.PermutedV {
+		vHeadDim := value.Dim(1)
+		elemSize := value.Stride(0)
+
+		value = value.View(ctx, elemSize*c.curCellRange.min,
+			cachedSize, value.Stride(1),
+			vHeadDim, value.Stride(2),
+			numKVHeads,
+		)
+	} else {
+		vHeadDim := value.Dim(0)
+		rowSize := value.Stride(2)
+
+		value = value.View(ctx, rowSize*c.curCellRange.min,
+			vHeadDim, value.Stride(1),
+			numKVHeads, value.Stride(2),
+			cachedSize,
+		)
+	}
+
+	return key, value, c.curMask
+}
+
+func (c *Causal) Put(ctx ml.Context, key, value ml.Tensor) {
+	kHeadDim := key.Dim(0)
+	vHeadDim := value.Dim(0)
+	numKVHeads := key.Dim(1)
+	batchSize := key.Dim(2)
+
+	if c.curBatchSize != batchSize {
+		panic(fmt.Errorf("inconsistent batch sizes (layer: %v, batch size: %v layer batch size: %v)", c.curLayer, c.curBatchSize, batchSize))
+	}
+
+	if _, ok := c.ctxs[c.curLayer]; !ok {
+		c.ctxs[c.curLayer] = c.backend.NewContextSize(2).Layer(c.curLayer)
+	}
+
+	if _, ok := c.keys[c.curLayer]; !ok {
+		c.keys[c.curLayer] = c.ctxs[c.curLayer].Zeros(c.DType, kHeadDim, numKVHeads, len(c.cells))
+	}
+
+	if _, ok := c.values[c.curLayer]; !ok {
+		if c.config.PermutedV {
+			c.values[c.curLayer] = c.ctxs[c.curLayer].Zeros(c.DType, len(c.cells), vHeadDim, numKVHeads)
+		} else {
+			c.values[c.curLayer] = c.ctxs[c.curLayer].Zeros(c.DType, vHeadDim, numKVHeads, len(c.cells))
+		}
+	}
+
+	rowSize := c.keys[c.curLayer].Stride(2)
+	ctx.Forward(key.Copy(ctx, c.keys[c.curLayer].View(ctx, rowSize*c.curLoc, kHeadDim*numKVHeads*batchSize)))
+
+	if c.config.PermutedV {
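+		// With PermutedV the cache tensor is allocated as (cells, head dim,
+		// kv heads), so the incoming values are permuted to match that layout
+		// before being copied in.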
+		elemSize := c.values[c.curLayer].Stride(0)
+
+		value = value.Permute(ctx, 1, 2, 0, 3)
+		ctx.Forward(value.Copy(ctx, c.values[c.curLayer].View(ctx, elemSize*c.curLoc, batchSize, len(c.cells)*elemSize, vHeadDim*numKVHeads)))
+	} else {
+		rowSize := c.values[c.curLayer].Stride(2)
+
+		ctx.Forward(value.Copy(ctx, c.values[c.curLayer].View(ctx, rowSize*c.curLoc, vHeadDim*numKVHeads*batchSize)))
+	}
+}
+
+func (c *Causal) CopyPrefix(srcSeq, dstSeq int, len int32) {
+	seqRange := newRange()
+
+	for i := range c.cells {
+		// Remove the contents of dstSeq so that we only have the copied prefix, metadata will be reset at the end
+		if slices.Contains(c.cells[i].sequences, dstSeq) {
+			c.cells[i].sequences = slices.DeleteFunc(c.cells[i].sequences, func(s int) bool { return s == dstSeq })
+		}
+
+		if slices.Contains(c.cells[i].sequences, srcSeq) && c.cells[i].pos < len {
+			c.cells[i].sequences = append(c.cells[i].sequences, dstSeq)
+			if i < seqRange.min {
+				seqRange.min = i
+			}
+			if i > seqRange.max {
+				seqRange.max = i
+			}
+		}
+	}
+
+	c.cellRanges[dstSeq] = seqRange
+}
+
+func (c *Causal) CanResume(seq int, pos int32) bool {
+	if c.windowSize == math.MaxInt32 {
+		return true
+	}
+
+	seqRange, ok := c.cellRanges[seq]
+	if !ok {
+		return false
+	}
+
+	// for sliding window, check that the window of the new sequence is contained in
+	// the window of what we are storing
+	var last int32 = -1
+	for i := seqRange.min; i <= seqRange.max; i++ {
+		if slices.Contains(c.cells[i].sequences, seq) {
+			last = max(last, c.cells[i].pos)
+		}
+	}
+
+	if last == -1 {
+		return false
+	}
+
+	lastWindowStart := max(0, last-c.windowSize)
+	posWindowStart := max(0, pos-c.windowSize)
+
+	return posWindowStart >= lastWindowStart
+}
+
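+// shift applies the model-provided shiftFn to the cached keys of seq for
+// positions at or after beginIndex, adjusting them by offset (for example,
+// re-rotating rotary position embeddings after Remove changes positions).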
+func (c *Causal) shift(seq int, beginIndex, offset int32) error {
+	if c.shiftFn == nil {
+		return ErrNotSupported
+	}
+
+	ctx := c.backend.NewContext()
+	defer ctx.Close()
+
+	seqRange := c.cellRanges[seq]
+	size := seqRange.max - seqRange.min + 1
+
+	offsets := make([]int32, size)
+	for i := range offsets {
+		cell := c.cells[seqRange.min+i]
+
+		if slices.Contains(cell.sequences, seq) && cell.pos >= beginIndex {
+			offsets[i] = offset
+		}
+	}
+
+	kShift, err := ctx.Input().FromIntSlice(offsets, len(offsets))
+	if err != nil {
+		return err
+	}
+
+	for i, key := range c.keys {
+		if key == nil {
+			continue
+		}
+
+		kHeadDim := key.Dim(0)
+		numKVHeads := key.Dim(1)
+		rowSize := key.Stride(2)
+
+		key = key.View(ctx, rowSize*seqRange.min,
+			kHeadDim, key.Stride(1),
+			numKVHeads, key.Stride(2),
+			size,
+		)
+
+		roped, err := c.shiftFn(ctx, i, key, kShift)
+		if err != nil {
+			return err
+		}
+
+		ctx.Forward(roped.Copy(ctx, key))
+	}
+
+	ctx.Compute()
+
+	return nil
+}
+
+func (c *Causal) Remove(seq int, beginIndex, endIndex int32) error {
+	// TODO(jessegross): We should check to see if removing the middle of the sequence will
+	// cause the sliding window to encompass tokens that we no longer have. If so, then we
+	// should return an error, which will trigger the runner to evaluate the full history and
+	// rebuild the window. However, if we have multimodal inputs in our history, this reuse
+	// results in use after free, so we don't do it for now.
+
+	var offset int32
+	if endIndex != math.MaxInt32 {
+		offset = beginIndex - endIndex
+	}
+
+	seqRange := newRange()
+
+	for i := range c.cells {
+		if slices.Contains(c.cells[i].sequences, seq) {
+			if c.cells[i].pos >= beginIndex && c.cells[i].pos < endIndex {
+				c.cells[i].sequences = slices.DeleteFunc(c.cells[i].sequences, func(s int) bool { return s == seq })
+			} else {
+				if c.cells[i].pos >= endIndex {
+					if slices.ContainsFunc(c.cells[i].sequences, func(s int) bool { return s != seq }) {
+						return errors.New("shifting cells shared by multiple sequences not supported")
+					}
+
+					c.cells[i].pos += offset
+				}
+				if i < seqRange.min {
+					seqRange.min = i
+				}
+				if i > seqRange.max {
+					seqRange.max = i
+				}
+			}
+		}
+	}
+
+	if seqRange == newRange() {
+		delete(c.cellRanges, seq)
+		return nil
+	}
+
+	c.cellRanges[seq] = seqRange
+
+	if endIndex != math.MaxInt32 {
+		err := c.shift(seq, endIndex+offset, offset)
+		if err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
diff --git a/kvcache/causal_test.go b/kvcache/causal_test.go
new file mode 100644
index 0000000..7969870
--- /dev/null
+++ b/kvcache/causal_test.go
@@ -0,0 +1,598 @@
+package kvcache
+
+import (
+	"math"
+	"slices"
+	"testing"
+
+	"github.com/ollama/ollama/ml"
+	"github.com/ollama/ollama/model/input"
+)
+
+type testCase struct {
+	name          string
+	in            []float32
+	inShape       []int
+	seqs          []int
+	pos           []int32
+	expected      []float32
+	expectedShape []int
+	expectedMask  []float32
+}
+
+func TestStore(t *testing.T) {
+	backend := &testBackend{}
+	cache := NewCausalCache(nil)
+	defer cache.Close()
+
+	cache.Init(backend, ml.DTypeF16, 1, 16, 16)
+
+	tests := []testCase{
+		{
+			name:          "FirstBatch",
+			in:            []float32{111, 211, 121, 221, 131, 231, 112, 212, 122, 222, 132, 232, 113, 213, 123, 223, 133, 233, 114, 214, 124, 224, 134, 234},
+			inShape:       []int{2, 3, 4},
+			seqs:          []int{0, 0, 0, 0},
+			pos:           []int32{0, 1, 2, 3},
+			expected:      []float32{111, 211, 121, 221, 131, 231, 112, 212, 122, 222, 132, 232, 113, 213, 123, 223, 133, 233, 114, 214, 124, 224, 134, 234},
+			expectedShape: []int{2, 3, 4},
+			expectedMask:  []float32{0, float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0, float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0, 0, float32(math.Inf(-1)), 0, 0, 0, 0},
+		},
+		{
+			name:          "SecondBatch",
+			in:            []float32{115, 215, 125, 225, 135, 235},
+			inShape:       []int{2, 3, 1},
+			seqs:          []int{0},
+			pos:           []int32{4},
+			expected:      []float32{111, 211, 121, 221, 131, 231, 112, 212, 122, 222, 132, 232, 113, 213, 123, 223, 133, 233, 114, 214, 124, 224, 134, 234, 115, 215, 125, 225, 135, 235},
+			expectedShape: []int{2, 3, 5},
+			expectedMask:  []float32{0, 0, 0, 0, 0},
+		},
+	}
+
+	testCache(t, backend, cache, tests)
+}
+
+func TestSWA(t *testing.T) {
+	backend := &testBackend{}
+	cache := NewSWACache(1, nil)
+	defer cache.Close()
+
+	cache.Init(backend, ml.DTypeF16, 1, 16, 16)
+
+	tests := []testCase{
+		{
+			name:          "FirstBatch",
+			in:            []float32{1, 2, 3, 4},
+			inShape:       []int{1, 1, 4},
+			seqs:          []int{0, 0, 0, 0},
+			pos:           []int32{0, 1, 2, 3},
+			expected:      []float32{1, 2, 3, 4},
+			expectedShape: []int{1, 1, 4},
+			expectedMask:  []float32{0, float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0, float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0, float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0},
+		},
+		{
+			name:          "SecondBatch",
+			in:            []float32{5, 6},
+			inShape:       []int{1, 1, 2},
+			seqs:          []int{0, 0},
+			pos:           []int32{4, 5},
+			expected:      []float32{5, 6, 3, 4},
+			expectedShape: []int{1, 1, 4},
+			expectedMask:  []float32{0, float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0, 0, float32(math.Inf(-1)), float32(math.Inf(-1))},
+		},
+	}
+
+	testCache(t, backend, cache, tests)
+}
+
+func TestChunkedAttention(t *testing.T) {
+	cache := NewChunkedAttentionCache(2, nil)
+	defer cache.Close()
+
+	var b testBackend
+	cache.Init(&b, ml.DTypeF16, 1, 16, 16)
+
+	x := float32(math.Inf(-1))
+
+	testCache(
+		t, &b, cache,
+		[]testCase{
+			{
+				name:          "FirstBatch",
+				in:            []float32{1, 2, 3, 4},
+				inShape:       []int{1, 1, 4},
+				seqs:          []int{0, 0, 0, 0},
+				pos:           []int32{0, 1, 2, 3},
+				expected:      []float32{1, 2, 3, 4},
+				expectedShape: []int{1, 1, 4},
+				expectedMask: []float32{
+					0, x, x, x,
+					0, 0, x, x,
+					x, x, 0, x,
+					x, x, 0, 0,
+				},
+			},
+			{
+				name:          "SecondBatch",
+				in:            []float32{5, 6, 7},
+				inShape:       []int{1, 1, 3},
+				seqs:          []int{0, 0, 0},
+				pos:           []int32{4, 5, 6},
+				expected:      []float32{1, 2, 3, 4, 5, 6, 7},
+				expectedShape: []int{1, 1, 7},
+				expectedMask: []float32{
+					x, x, x, x, 0, x, x,
+					x, x, x, x, 0, 0, x,
+					x, x, x, x, x, x, 0,
+				},
+			},
+			{
+				name:          "ThirdBatch",
+				in:            []float32{8, 9},
+				inShape:       []int{1, 1, 2},
+				seqs:          []int{0, 0},
+				pos:           []int32{7, 8},
+				expected:      []float32{1, 2, 3, 4, 5, 6, 7, 8, 9},
+				expectedShape: []int{1, 1, 9},
+				expectedMask: []float32{
+					x, x, x, x, x, x, 0, 0, x,
+					x, x, x, x, x, x, x, x, 0,
+				},
+			},
+		},
+	)
+}
+
+func TestSequences(t *testing.T) {
+	backend := &testBackend{}
+	cache := NewCausalCache(nil)
+	defer cache.Close()
+
+	cache.Init(backend, ml.DTypeF16, 1, 16, 16)
+
+	tests := []testCase{
+		{
+			name:          "FirstBatch",
+			in:            []float32{1, 2, 3, 4},
+			inShape:       []int{1, 1, 4},
+			seqs:          []int{0, 0, 1, 1},
+			pos:           []int32{0, 1, 0, 1},
+			expected:      []float32{1, 2, 3, 4},
+			expectedShape: []int{1, 1, 4},
+			expectedMask:  []float32{0, float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0, float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), 0, float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0},
+		},
+		{
+			name:          "SecondBatch",
+			in:            []float32{5, 6},
+			inShape:       []int{1, 1, 2},
+			seqs:          []int{0, 1},
+			pos:           []int32{2, 2},
+			expected:      []float32{1, 2, 3, 4, 5, 6},
+			expectedShape: []int{1, 1, 6},
+			expectedMask:  []float32{0, 0, float32(math.Inf(-1)), float32(math.Inf(-1)), 0, float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0, float32(math.Inf(-1)), 0},
+		},
+	}
+
+	testCache(t, backend, cache, tests)
+}
+
+func TestRemove(t *testing.T) {
+	backend := &testBackend{}
+	cache := NewCausalCache(func(ctx ml.Context, layer int, key, shift ml.Tensor) (ml.Tensor, error) {
+		return key.Add(ctx, shift), nil
+	})
+	defer cache.Close()
+
+	cache.Init(backend, ml.DTypeF16, 1, 16, 16)
+
+	tests := []testCase{
+		{
+			name:          "FirstBatch",
+			in:            []float32{1, 2, 3, 4},
+			inShape:       []int{1, 1, 4},
+			seqs:          []int{0, 0, 1, 1},
+			pos:           []int32{0, 1, 0, 1},
+			expected:      []float32{1, 2, 3, 4},
+			expectedShape: []int{1, 1, 4},
+			expectedMask:  []float32{0, float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0, float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), 0, float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0},
+		},
+	}
+
+	testCache(t, backend, cache, tests)
+
+	err := cache.Remove(0, 1, math.MaxInt32)
+	if err != nil {
+		panic(err)
+	}
+
+	tests = []testCase{
+		{
+			name:          "RemoveEnd",
+			in:            []float32{5, 6},
+			inShape:       []int{1, 1, 2},
+			seqs:          []int{0, 1},
+			pos:           []int32{1, 2},
+			expected:      []float32{1, 2, 3, 4, 5, 6},
+			expectedShape: []int{1, 1, 6},
+			expectedMask:  []float32{0, float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), 0, float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0, float32(math.Inf(-1)), 0},
+		},
+	}
+
+	testCache(t, backend, cache, tests)
+
+	err = cache.Remove(0, 0, 1)
+	if err != nil {
+		panic(err)
+	}
+
+	tests = []testCase{
+		{
+			name:          "RemoveMiddle",
+			in:            []float32{7, 8},
+			inShape:       []int{1, 1, 2},
+			seqs:          []int{0, 0},
+			pos:           []int32{1, 2},
+			expected:      []float32{7, 8, 3, 4, 4},
+			expectedShape: []int{1, 1, 5},
+			expectedMask:  []float32{0, float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0, 0, float32(math.Inf(-1)), float32(math.Inf(-1)), 0},
+		},
+	}
+
+	testCache(t, backend, cache, tests)
+}
+
+func TestDefrag(t *testing.T) {
+	backend := &testBackend{}
+	cache := NewCausalCache(func(ctx ml.Context, layer int, key, shift ml.Tensor) (ml.Tensor, error) {
+		return key.Add(ctx, shift), nil
+	})
+	defer cache.Close()
+
+	cache.Init(backend, ml.DTypeF16, 1, 16, 16)
+
+	tests := []testCase{
+		{
+			name:          "FirstBatch",
+			in:            []float32{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16},
+			inShape:       []int{1, 1, 16},
+			seqs:          []int{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
+			pos:           []int32{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15},
+			expected:      []float32{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16},
+			expectedShape: []int{1, 1, 16},
+			expectedMask:  []float32{0, float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0, float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0, 0, float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0, 0, 0, float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0, 0, 0, 0, float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0, 0, 0, 0, 0, float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0, 0, 0, 0, 0, 0, float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0, 0, 0, 0, 0, 0, 0, float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0, 0, 0, 0, 0, 0, 0, 0, float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, float32(math.Inf(-1)), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
+		},
+	}
+
+	testCache(t, backend, cache, tests)
+
+	err := cache.Remove(0, 2, 4)
+	if err != nil {
+		panic(err)
+	}
+
+	err = cache.Remove(0, 13, math.MaxInt32)
+	if err != nil {
+		panic(err)
+	}
+
+	tests = []testCase{
+		{
+			name:          "Defrag",
+			in:            []float32{17, 18, 19},
+			inShape:       []int{1, 1, 3},
+			seqs:          []int{0, 0, 0},
+			pos:           []int32{16, 17, 18},
+			expected:      []float32{1, 2, 12, 13, 3, 4, 5, 6, 7, 8, 9, 10, 11, 17, 18, 19},
+			expectedShape: []int{1, 1, 16},
+			expectedMask:  []float32{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, float32(math.Inf(-1)), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
+		},
+	}
+
+	testCache(t, backend, cache, tests)
+}
+
+func TestCopy(t *testing.T) {
+	backend := &testBackend{}
+	cache := NewCausalCache(func(ctx ml.Context, layer int, key, shift ml.Tensor) (ml.Tensor, error) { return key, nil })
+	defer cache.Close()
+
+	cache.Init(backend, ml.DTypeF16, 1, 16, 16)
+
+	tests := []testCase{
+		{
+			name:          "FirstBatch",
+			in:            []float32{1, 2, 3, 4},
+			inShape:       []int{1, 1, 4},
+			seqs:          []int{0, 0, 0, 0},
+			pos:           []int32{0, 1, 2, 3},
+			expected:      []float32{1, 2, 3, 4},
+			expectedShape: []int{1, 1, 4},
+			expectedMask:  []float32{0, float32(math.Inf(-1)), float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0, float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0, 0, float32(math.Inf(-1)), 0, 0, 0, 0},
+		},
+	}
+
+	testCache(t, backend, cache, tests)
+
+	cache.CopyPrefix(0, 1, 2)
+
+	tests = []testCase{
+		{
+			name:          "Copy",
+			in:            []float32{5, 6},
+			inShape:       []int{1, 1, 2},
+			seqs:          []int{1, 1},
+			pos:           []int32{3, 4},
+			expected:      []float32{1, 2, 3, 4, 5, 6},
+			expectedShape: []int{1, 1, 6},
+			expectedMask:  []float32{0, 0, float32(math.Inf(-1)), float32(math.Inf(-1)), 0, float32(math.Inf(-1)), 0, 0, float32(math.Inf(-1)), float32(math.Inf(-1)), 0, 0},
+		},
+	}
+
+	testCache(t, backend, cache, tests)
+}
+
+func testCache(t *testing.T, backend ml.Backend, cache Cache, tests []testCase) {
+	for _, test := range tests {
+		t.Run(test.name, func(t *testing.T) {
+			context := backend.NewContext()
+			defer context.Close()
+
+			err := cache.StartForward(context, input.Batch{Positions: test.pos, Sequences: test.seqs}, false)
+			if err != nil {
+				panic(err)
+			}
+
+			cache.SetLayer(0)
+			tensor, _ := context.FromFloatSlice(test.in, test.inShape...)
+			cache.Put(context, tensor, tensor)
+
+			out, _, mask := cache.Get(context)
+
+			context.Forward(out, mask).Compute(out, mask)
+
+			if !slices.Equal(out.Floats(), test.expected) {
+				t.Errorf("TestCache: have %v; want %v", out.Floats(), test.expected)
+			}
+
+			if !slices.Equal(out.Shape(), test.expectedShape) {
+				t.Errorf("TestCache: has shape %v; want %v", out.Shape(), test.expectedShape)
+			}
+
+			if !slices.Equal(mask.Floats(), test.expectedMask) {
+				t.Errorf("TestCache: have mask: have %v want %v", mask.Floats(), test.expectedMask)
+			}
+		})
+	}
+}
+
+func TestCanResume(t *testing.T) {
+	backend := &testBackend{}
+	windowSize := int32(4)
+	cache := NewSWACache(windowSize, nil)
+	defer cache.Close()
+
+	cache.Init(backend, ml.DTypeF16, 1, 16, 16)
+
+	context := backend.NewContext()
+	defer context.Close()
+
+	err := cache.StartForward(context, input.Batch{
+		Positions: []int32{0, 1, 2, 3},
+		Sequences: []int{0, 0, 0, 0},
+	}, false)
+	if err != nil {
+		t.Fatalf("StartForward failed: %v", err)
+	}
+
+	cache.SetLayer(0)
+	tensor, _ := context.FromFloatSlice([]float32{1, 2, 3, 4}, 1, 1, 4)
+	cache.Put(context, tensor, tensor)
+
+	// with window size 4, nothing has slid out of the window yet
+	if !cache.CanResume(0, 0) {
+		t.Errorf("CanResume(0, 0) = false, want true (within window)")
+	}
+	if !cache.CanResume(0, 1) {
+		t.Errorf("CanResume(0, 1) = false, want true (within window)")
+	}
+	if !cache.CanResume(0, 2) {
+		t.Errorf("CanResume(0, 2) = false, want true (within window)")
+	}
+	if !cache.CanResume(0, 3) {
+		t.Errorf("CanResume(0, 3) = false, want true (latest position)")
+	}
+
+	// shift window by adding position 4
+	err = cache.StartForward(context, input.Batch{
+		Positions: []int32{4, 5},
+		Sequences: []int{0, 0},
+	}, false)
+	if err != nil {
+		t.Fatalf("StartForward failed: %v", err)
+	}
+
+	cache.SetLayer(0)
+	tensor, _ = context.FromFloatSlice([]float32{5, 6}, 1, 1, 2)
+	cache.Put(context, tensor, tensor)
+
+	// only the latest position has overlapping windows
+	if cache.CanResume(0, 0) {
+		t.Errorf("after shift: CanResume(0, 0) = true, want false (outside window)")
+	}
+	if cache.CanResume(0, 1) {
+		t.Errorf("after shift: CanResume(0, 1) = true, want false (outside window)")
+	}
+	if cache.CanResume(0, 2) {
+		t.Errorf("after shift: CanResume(0, 2) = true, want false (outside window)")
+	}
+	if cache.CanResume(0, 3) {
+		t.Errorf("after shift: CanResume(0, 3) = true, want false (outside window)")
+	}
+	if cache.CanResume(0, 4) {
+		t.Errorf("after shift: CanResume(0, 4) = true, want false (outside window)")
+	}
+	if !cache.CanResume(0, 5) {
+		t.Errorf("after shift: CanResume(0, 5) = false, want true (latest position)")
+	}
+}
+
+type testBackend struct {
+	ml.Backend
+}
+
+func (b *testBackend) NewContext() ml.Context {
+	return &testContext{}
+}
+
+func (b *testBackend) NewContextSize(int) ml.Context {
+	return &testContext{}
+}
+
+type testContext struct {
+	ml.Context
+}
+
+func (c *testContext) Empty(dtype ml.DType, shape ...int) ml.Tensor {
+	total := 0
+
+	if len(shape) > 0 {
+		total = 1
+		for _, s := range shape {
+			total *= s
+		}
+	}
+
+	return &testTensor{dtype: dtype, elementSize: 4, data: make([]float32, total), shape: shape}
+}
+
+func (c *testContext) Zeros(dtype ml.DType, shape ...int) ml.Tensor {
+	return c.Empty(dtype, shape...)
+}
+
+func (c *testContext) FromFloatSlice(s []float32, shape ...int) (ml.Tensor, error) {
+	t := c.Empty(ml.DTypeF32, shape...).(*testTensor)
+
+	copy(t.data, s)
+
+	return t, nil
+}
+
+func (c *testContext) FromIntSlice(s []int32, shape ...int) (ml.Tensor, error) {
+	f := make([]float32, len(s))
+	for i := range f {
+		f[i] = float32(s[i])
+	}
+
+	out, _ := c.FromFloatSlice(f, shape...)
+	out.(*testTensor).dtype = ml.DTypeI32
+
+	return out, nil
+}
+
+func (c *testContext) Arange(start, stop, step float32, dtype ml.DType) ml.Tensor {
+	s := make([]float32, 0, int((stop-start)/step))
+	for i := start; i < stop; i += step {
+		s = append(s, i)
+	}
+
+	out, _ := c.FromFloatSlice(s, len(s))
+	out.(*testTensor).dtype = dtype
+	return out
+}
+
+func (c *testContext) Input() ml.Context    { return c }
+func (c *testContext) Layer(int) ml.Context { return c }
+
+func (c *testContext) Forward(...ml.Tensor) ml.Context { return c }
+
+func (c *testContext) Compute(...ml.Tensor) {}
+
+func (c *testContext) Reserve() error { return nil }
+
+func (c *testContext) MaxGraphNodes() int {
+	return 10
+}
+
+func (c *testContext) Close() {}
+
+type testTensor struct {
+	ml.Tensor
+
+	dtype       ml.DType
+	elementSize int
+	data        []float32
+	shape       []int
+}
+
+func (t *testTensor) Dim(n int) int {
+	return t.shape[n]
+}
+
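+// Stride returns the distance in bytes between consecutive indices along
+// dimension n, assuming the first dimension is stored contiguously.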
+func (t *testTensor) Stride(n int) int {
+	stride := t.elementSize
+	for i := range n {
+		stride *= t.shape[i]
+	}
+
+	return stride
+}
+
+func (t *testTensor) Shape() []int {
+	return t.shape
+}
+
+func (t *testTensor) DType() ml.DType {
+	return t.dtype
+}
+
+func (t *testTensor) Floats() []float32 {
+	out := make([]float32, len(t.data))
+	copy(out, t.data)
+	return out
+}
+
+func (t *testTensor) Neg(ctx ml.Context) ml.Tensor {
+	out := ctx.Empty(t.DType(), t.Shape()...).(*testTensor)
+	for i := range out.data {
+		out.data[i] = -t.data[i]
+	}
+	return out
+}
+
+func (t *testTensor) Add(ctx ml.Context, t2 ml.Tensor) ml.Tensor {
+	out := ctx.Empty(t.DType(), t.Shape()...).(*testTensor)
+
+	for i := range out.data {
+		out.data[i] = t.data[i] + t2.(*testTensor).data[i]
+	}
+
+	return out
+}
+
+func (t *testTensor) View(ctx ml.Context, offset int, shape ...int) ml.Tensor {
+	offset /= t.elementSize
+
+	var s []int
+
+	switch len(shape) {
+	case 1:
+		s = []int{shape[0]}
+	case 5:
+		s = []int{shape[0], shape[2], shape[4]}
+	default:
+		panic("unsupported number of dimensions")
+	}
+
+	context := &testContext{}
+
+	view := context.Empty(t.dtype, s...).(*testTensor)
+	view.data = t.data[offset : offset+len(view.data)]
+
+	return view
+}
+
+func (t *testTensor) Copy(ctx ml.Context, t2 ml.Tensor) ml.Tensor {
+	copy(t2.(*testTensor).data, t.data)
+	return nil
+}
diff --git a/kvcache/encoder.go b/kvcache/encoder.go
new file mode 100644
index 0000000..0f269c3
--- /dev/null
+++ b/kvcache/encoder.go
@@ -0,0 +1,156 @@
+package kvcache
+
+import (
+	"fmt"
+
+	"github.com/ollama/ollama/ml"
+	"github.com/ollama/ollama/model/input"
+)
+
+// Encoder cache stores K and V tensors that are position independent
+//
+// The tensors can be of any shape and will be returned as they were stored
+// The mask is currently always nil
+//
+// Not currently safe for multiple sequences
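+//
+// A typical use is caching the output of a vision encoder for the most
+// recently provided image so it does not need to be recomputed on every
+// decode step.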
+type EncoderCache struct {
+	// config controls mostly backend-specific optimizations
+	config *ml.CacheConfig
+
+	// ** current forward pass **
+
+	// the active layer for Get and Put
+	curLayer int
+
+	// if something is stored during this pass, this
+	// will be the position (but there is no guarantee
+	// anything will be stored)
+	curPos int32
+
+	// curReserve indicates that this forward pass is only for
+	// memory reservation and we should not update our metadata
+	// based on it.
+	curReserve bool
+
+	// ** cache metadata **
+
+	// was something stored in the cache?
+	encoderCached bool
+
+	// position of the cached data
+	encoderPos int32
+
+	// ** cache data storage **
+	backend      ml.Backend
+	ctxs         map[int]ml.Context
+	keys, values map[int]ml.Tensor
+}
+
+func NewEncoderCache() *EncoderCache {
+	return &EncoderCache{
+		ctxs:   make(map[int]ml.Context),
+		keys:   make(map[int]ml.Tensor),
+		values: make(map[int]ml.Tensor),
+	}
+}
+
+func (c *EncoderCache) Init(backend ml.Backend, dtype ml.DType, maxSequences, capacity, maxBatch int) {
+	if c.config == nil {
+		var config ml.CacheConfig
+		if cc, ok := backend.(ml.BackendCacheConfig); ok {
+			config = cc.CacheConfig()
+		}
+		c.config = &config
+	}
+
+	if maxSequences > 1 {
+		panic(fmt.Errorf("encoder cache does not support multiple sequences; requested: %v", maxSequences))
+	}
+
+	if c.config.CachePadding != 0 && c.config.CachePadding != 1 {
+		panic(fmt.Errorf("encoder cache is unable to enforce requested CachePadding (%v)", c.config.CachePadding))
+	}
+
+	c.backend = backend
+}
+
+func (c *EncoderCache) SetConfig(config ml.CacheConfig) {
+	if c.config != nil {
+		panic("config cannot be changed after being previously set, either by the model or backend")
+	}
+
+	c.config = &config
+}
+
+func (c *EncoderCache) Close() {
+	for _, ctx := range c.ctxs {
+		ctx.Close()
+	}
+}
+
+func (c *EncoderCache) StartForward(ctx ml.Context, batch input.Batch, reserve bool) error {
+	// We work with the most recent image
+	if len(batch.Multimodal) > 0 {
+		c.curPos = batch.Positions[batch.Multimodal[len(batch.Multimodal)-1].Index]
+	}
+
+	c.curReserve = reserve
+
+	return nil
+}
+
+func (c *EncoderCache) SetLayer(layer int) {
+	c.curLayer = layer
+}
+
+func (c *EncoderCache) EncoderCached() bool {
+	return c.encoderCached
+}
+
+func (c *EncoderCache) Get(ctx ml.Context) (ml.Tensor, ml.Tensor, ml.Tensor) {
+	return c.keys[c.curLayer], c.values[c.curLayer], nil
+}
+
+func (c *EncoderCache) Put(ctx ml.Context, key, value ml.Tensor) {
+	if !c.curReserve {
+		c.encoderPos = c.curPos
+		c.encoderCached = true
+	}
+
+	if c.config.PermutedV {
+		value = value.Permute(ctx, 1, 2, 0, 3)
+	}
+
+	if _, ok := c.ctxs[c.curLayer]; !ok {
+		c.ctxs[c.curLayer] = c.backend.NewContextSize(2).Layer(c.curLayer)
+	}
+
+	if _, ok := c.keys[c.curLayer]; !ok {
+		c.keys[c.curLayer] = c.ctxs[c.curLayer].Empty(key.DType(), key.Shape()...)
+	}
+
+	if _, ok := c.values[c.curLayer]; !ok {
+		c.values[c.curLayer] = c.ctxs[c.curLayer].Empty(value.DType(), value.Shape()...)
+	}
+
+	ctx.Forward(
+		key.Copy(ctx, c.keys[c.curLayer]),
+		value.Copy(ctx, c.values[c.curLayer]),
+	)
+}
+
+func (c *EncoderCache) CopyPrefix(srcSeq, dstSeq int, len int32) {
+	panic("encoder cache does not support multiple sequences")
+}
+
+func (c *EncoderCache) CanResume(seq int, pos int32) bool {
+	return true
+}
+
+func (c *EncoderCache) Remove(seq int, beginIndex, endIndex int32) error {
+	if c.encoderPos >= beginIndex && c.encoderPos < endIndex {
+		c.encoderCached = false
+	}
+
+	return nil
+}
diff --git a/kvcache/wrapper.go b/kvcache/wrapper.go
new file mode 100644
index 0000000..7533d95
--- /dev/null
+++ b/kvcache/wrapper.go
@@ -0,0 +1,110 @@
+package kvcache
+
+import (
+	"math"
+
+	"github.com/ollama/ollama/ml"
+	"github.com/ollama/ollama/model/input"
+)
+
+// Wrapper cache is a container for multiple types of caches,
+// such as for the encoding and decoding portions of a model.
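+//
+// For example (hypothetical wiring), a model with both an encoder and a
+// causal decoder might construct:
+//
+//	kvcache.NewWrapperCache(kvcache.NewEncoderCache(), kvcache.NewCausalCache(shiftFn))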
+type WrapperCache struct {
+	// caches we are wrapping
+	caches []Cache
+
+	// cache to be used for this layer
+	curType int
+}
+
+func NewWrapperCache(caches ...Cache) *WrapperCache {
+	return &WrapperCache{
+		caches: caches,
+	}
+}
+
+func (c *WrapperCache) Init(backend ml.Backend, dtype ml.DType, maxSequences, capacity, maxBatch int) {
+	for _, cache := range c.caches {
+		cache.Init(backend, dtype, maxSequences, capacity, maxBatch)
+	}
+}
+
+func (c *WrapperCache) SetConfig(config ml.CacheConfig) {
+	for _, cache := range c.caches {
+		cache.SetConfig(config)
+	}
+}
+
+func (c *WrapperCache) Close() {
+	for _, cache := range c.caches {
+		cache.Close()
+	}
+}
+
+func (c *WrapperCache) StartForward(ctx ml.Context, batch input.Batch, reserve bool) error {
+	for i, cache := range c.caches {
+		err := cache.StartForward(ctx, batch, reserve)
+		if err != nil {
+			// unwind on error - Remove with endIndex set to math.MaxInt32 does not fail
+			for j := i - 1; j >= 0; j-- {
+				for k := range batch.Positions {
+					_ = c.caches[j].Remove(batch.Sequences[k], batch.Positions[k], math.MaxInt32)
+				}
+			}
+			return err
+		}
+	}
+
+	c.curType = 0
+	return nil
+}
+
+func (c *WrapperCache) SetLayer(layer int) {
+	for _, cache := range c.caches {
+		cache.SetLayer(layer)
+	}
+}
+
+func (c *WrapperCache) SetLayerType(layerType int) {
+	c.curType = layerType
+}
+
+func (c *WrapperCache) UnderlyingCache() Cache {
+	return c.caches[c.curType]
+}
+
+func (c *WrapperCache) Get(ctx ml.Context) (ml.Tensor, ml.Tensor, ml.Tensor) {
+	return c.caches[c.curType].Get(ctx)
+}
+
+func (c *WrapperCache) Put(ctx ml.Context, key, value ml.Tensor) {
+	c.caches[c.curType].Put(ctx, key, value)
+}
+
+func (c *WrapperCache) CopyPrefix(srcSeq, dstSeq int, len int32) {
+	for _, cache := range c.caches {
+		cache.CopyPrefix(srcSeq, dstSeq, len)
+	}
+}
+
+func (c *WrapperCache) CanResume(seq int, pos int32) bool {
+	for _, cache := range c.caches {
+		if !cache.CanResume(seq, pos) {
+			return false
+		}
+	}
+
+	return true
+}
+
+func (c *WrapperCache) Remove(seq int, beginIndex, endIndex int32) error {
+	// If one of these fails, the caller is supposed to retry with endIndex set to math.MaxInt32, which should not fail
+	for _, cache := range c.caches {
+		err := cache.Remove(seq, beginIndex, endIndex)
+		if err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
diff --git a/llama/.gitignore b/llama/.gitignore
new file mode 100644
index 0000000..ee55204
--- /dev/null
+++ b/llama/.gitignore
@@ -0,0 +1,3 @@
+*.bin
+*.gguf
+build/
\ No newline at end of file
diff --git a/llama/README.md b/llama/README.md
new file mode 100644
index 0000000..bfe66a8
--- /dev/null
+++ b/llama/README.md
@@ -0,0 +1,55 @@
+# `llama`
+
+This package provides Go bindings to [llama.cpp](https://github.com/ggerganov/llama.cpp).
+
+## Vendoring
+
+Ollama vendors [llama.cpp](https://github.com/ggerganov/llama.cpp/) and [ggml](https://github.com/ggerganov/llama.cpp/tree/master/ggml/src). While we generally strive to contribute changes back upstream to avoid drift, we carry a small set of patches which are applied to the tracking commit.
+
+If you update the vendoring code, start by running the following command to establish the tracking llama.cpp repo in the `./vendor/` directory.
+
+```shell
+make -f Makefile.sync apply-patches
+```
+
+### Updating Base Commit
+
+**Pin to new base commit**
+
+To change the base commit, update `FETCH_HEAD` in Makefile.sync.
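+
+For illustration only (check Makefile.sync for the exact variable syntax), pinning to the commit currently recorded in `llama/build-info.cpp` would look something like:
+
+```shell
+# Makefile.sync (illustrative)
+FETCH_HEAD=de4c07f93783a1a96456a44dc16b9db538ee1618
+```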
+
+When updating to a newer base commit, the existing patches may not apply cleanly and require manual merge resolution.
+
+Start by applying the patches. If any of the patches conflict, `git am` will stop at the first failure.
+
+```shell
+make -f Makefile.sync apply-patches
+```
+
+If there are conflicts, you will see an error message. Resolve the conflicts in `./vendor/`, continue the patch series with `git am --continue`, then rerun `make -f Makefile.sync apply-patches`. Repeat until all patches apply cleanly.
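+
+A minimal conflict-resolution loop, assuming the patch series is applied inside `./vendor/`, might look like:
+
+```shell
+cd vendor
+# fix the conflicting files, then stage them and continue the series
+git add <resolved files>
+git am --continue
+cd ..
+make -f Makefile.sync apply-patches
+```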
+
+Once all patches are applied, commit the changes to the tracking repository.
+
+```shell
+make -f Makefile.sync format-patches sync
+```
+
+### Generating Patches
+
+When working on new fixes or features that impact vendored code, use the following workflow. First, get a clean tracking repo with all current patches applied:
+
+```shell
+make -f Makefile.sync clean apply-patches
+```
+
+Iterate until you're ready to submit PRs. Once your code is ready, commit a change in the `./vendor/` directory, then generate the patches for Ollama with
+
+```shell
+make -f Makefile.sync format-patches
+```
+
+In your `./vendor/` directory, create a branch, and cherry-pick the new commit to that branch, then submit a PR upstream to llama.cpp.
+
+Commit the changes in the ollama repo and submit a PR to Ollama, which will include the vendored code update with your change, along with the patches.
+
+After your upstream PR is merged, follow the **Updating Base Commit** instructions above; however, remove your patch before running `apply-patches`, since the new base commit already contains your change.
diff --git a/llama/build-info.cpp b/llama/build-info.cpp
new file mode 100644
index 0000000..afef6b8
--- /dev/null
+++ b/llama/build-info.cpp
@@ -0,0 +1,4 @@
+int LLAMA_BUILD_NUMBER = 0;
+char const *LLAMA_COMMIT = "de4c07f93783a1a96456a44dc16b9db538ee1618";
+char const *LLAMA_COMPILER = "";
+char const *LLAMA_BUILD_TARGET = "";
diff --git a/llama/build-info.cpp.in b/llama/build-info.cpp.in
new file mode 100644
index 0000000..07cbd0e
--- /dev/null
+++ b/llama/build-info.cpp.in
@@ -0,0 +1,4 @@
+int LLAMA_BUILD_NUMBER = 0;
+char const *LLAMA_COMMIT = "@FETCH_HEAD@";
+char const *LLAMA_COMPILER = "";
+char const *LLAMA_BUILD_TARGET = "";
diff --git a/llama/llama.cpp/.rsync-filter b/llama/llama.cpp/.rsync-filter
new file mode 100644
index 0000000..1f81b00
--- /dev/null
+++ b/llama/llama.cpp/.rsync-filter
@@ -0,0 +1,23 @@
+protect **/*.go
+include common/
+include common/base64.*
+include common/common.*
+include common/json-schema-to-grammar.*
+include common/json.*
+include common/log.*
+include common/sampling.*
+include common/stb_image.*
+include include/
+include include/llama.*
+include include/llama-*.*
+include tools/
+include tools/mtmd/
+include tools/mtmd/clip.*
+include tools/mtmd/clip-impl.*
+include tools/mtmd/llava.*
+include src/
+include src/llama.*
+include src/llama-*.*
+include src/unicode-data.*
+include src/unicode.*
+exclude *
diff --git a/llama/llama.cpp/LICENSE b/llama/llama.cpp/LICENSE
new file mode 100644
index 0000000..acb96ce
--- /dev/null
+++ b/llama/llama.cpp/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2023-2024 The ggml authors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/llama/llama.cpp/common/base64.hpp b/llama/llama.cpp/common/base64.hpp
new file mode 100644
index 0000000..563247a
--- /dev/null
+++ b/llama/llama.cpp/common/base64.hpp
@@ -0,0 +1,392 @@
+/*
+This is free and unencumbered software released into the public domain.
+
+Anyone is free to copy, modify, publish, use, compile, sell, or
+distribute this software, either in source code form or as a compiled
+binary, for any purpose, commercial or non-commercial, and by any
+means.
+
+In jurisdictions that recognize copyright laws, the author or authors
+of this software dedicate any and all copyright interest in the
+software to the public domain. We make this dedication for the benefit
+of the public at large and to the detriment of our heirs and
+successors. We intend this dedication to be an overt act of
+relinquishment in perpetuity of all present and future rights to this
+software under copyright law.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
+
+For more information, please refer to <http://unlicense.org/>
+*/
+
+#ifndef PUBLIC_DOMAIN_BASE64_HPP_
+#define PUBLIC_DOMAIN_BASE64_HPP_
+
+#include <cstdint>
+#include <iterator>
+#include <stdexcept>
+#include <string>
+
+class base64_error : public std::runtime_error
+{
+public:
+    using std::runtime_error::runtime_error;
+};
+
+class base64
+{
+public:
+    enum class alphabet
+    {
+        /** the alphabet is detected automatically */
+        auto_,
+        /** the standard base64 alphabet is used */
+        standard,
+        /** like `standard` except that the characters `+` and `/` are replaced by `-` and `_` respectively*/
+        url_filename_safe
+    };
+
+    enum class decoding_behavior
+    {
+        /** if the input is not padded, the remaining bits are ignored */
+        moderate,
+        /** if a padding character is encountered, decoding is finished */
+        loose
+    };
+
+    /**
+     Encodes all the elements from `in_begin` to `in_end` to `out`.
+
+     @warning The source and destination cannot overlap. The destination must be able to hold at least
+     `required_encode_size(std::distance(in_begin, in_end))`, otherwise the behavior depends on the output iterator.
+
+     @tparam Input_iterator the source; the returned elements are cast to `std::uint8_t` and should not be greater than
+     8 bits
+     @tparam Output_iterator the destination; the elements written to it are from the type `char`
+     @param in_begin the beginning of the source
+     @param in_end the ending of the source
+     @param out the destination iterator
+     @param alphabet which alphabet should be used
+     @returns the iterator to the next element past the last element copied
+     @throws see `Input_iterator` and `Output_iterator`
+    */
+    template<typename Input_iterator, typename Output_iterator>
+    static Output_iterator encode(Input_iterator in_begin, Input_iterator in_end, Output_iterator out,
+                                  alphabet alphabet = alphabet::standard)
+    {
+        constexpr auto pad = '=';
+        const char* alpha  = alphabet == alphabet::url_filename_safe
+                                ? "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_"
+                                : "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
+
+        while (in_begin != in_end) {
+            std::uint8_t i0 = 0, i1 = 0, i2 = 0;
+
+            // first character
+            i0 = static_cast<std::uint8_t>(*in_begin);
+            ++in_begin;
+
+            *out = alpha[i0 >> 2 & 0x3f];
+            ++out;
+
+            // part of first character and second
+            if (in_begin != in_end) {
+                i1 = static_cast<std::uint8_t>(*in_begin);
+                ++in_begin;
+
+                *out = alpha[((i0 & 0x3) << 4) | (i1 >> 4 & 0x0f)];
+                ++out;
+            } else {
+                *out = alpha[(i0 & 0x3) << 4];
+                ++out;
+
+                // last padding
+                *out = pad;
+                ++out;
+
+                // last padding
+                *out = pad;
+                ++out;
+
+                break;
+            }
+
+            // part of second character and third
+            if (in_begin != in_end) {
+                i2 = static_cast<std::uint8_t>(*in_begin);
+                ++in_begin;
+
+                *out = alpha[((i1 & 0xf) << 2) | (i2 >> 6 & 0x03)];
+                ++out;
+            } else {
+                *out = alpha[(i1 & 0xf) << 2];
+                ++out;
+
+                // last padding
+                *out = pad;
+                ++out;
+
+                break;
+            }
+
+            // rest of third
+            *out = alpha[i2 & 0x3f];
+            ++out;
+        }
+
+        return out;
+    }
+    /**
+     Encodes a string.
+
+     @param str the string that should be encoded
+     @param alphabet which alphabet should be used
+     @returns the encoded base64 string
+     @throws see base64::encode()
+    */
+    static std::string encode(const std::string& str, alphabet alphabet = alphabet::standard)
+    {
+        std::string result;
+
+        result.reserve(required_encode_size(str.length()) + 1);
+
+        encode(str.begin(), str.end(), std::back_inserter(result), alphabet);
+
+        return result;
+    }
+    /**
+     Encodes a char array.
+
+     @param buffer the char array
+     @param size the size of the array
+     @param alphabet which alphabet should be used
+     @returns the encoded string
+    */
+    static std::string encode(const char* buffer, std::size_t size, alphabet alphabet = alphabet::standard)
+    {
+        std::string result;
+
+        result.reserve(required_encode_size(size) + 1);
+
+        encode(buffer, buffer + size, std::back_inserter(result), alphabet);
+
+        return result;
+    }
+    /**
+     Decodes all the elements from `in_begin` to `in_end` to `out`. `in_begin` may point to the same location as `out`,
+     in other words: inplace decoding is possible.
+
+     @warning The destination must be able to hold at least `required_decode_size(std::distance(in_begin, in_end))`,
+     otherwise the behavior depends on the output iterator.
+
+     @tparam Input_iterator the source; the returned elements are cast to `char`
+     @tparam Output_iterator the destination; the elements written to it are from the type `std::uint8_t`
+     @param in_begin the beginning of the source
+     @param in_end the ending of the source
+     @param out the destination iterator
+     @param alphabet which alphabet should be used
+     @param behavior the behavior when an error was detected
+     @returns the iterator to the next element past the last element copied
+     @throws base64_error depending on the set behavior
+     @throws see `Input_iterator` and `Output_iterator`
+    */
+    template<typename Input_iterator, typename Output_iterator>
+    static Output_iterator decode(Input_iterator in_begin, Input_iterator in_end, Output_iterator out,
+                                  alphabet alphabet          = alphabet::auto_,
+                                  decoding_behavior behavior = decoding_behavior::moderate)
+    {
+        //constexpr auto pad = '=';
+        std::uint8_t last  = 0;
+        auto bits          = 0;
+
+        while (in_begin != in_end) {
+            auto c = *in_begin;
+            ++in_begin;
+
+            if (c == '=') {
+                break;
+            }
+
+            auto part = _base64_value(alphabet, c);
+
+            // enough bits for one byte
+            if (bits + 6 >= 8) {
+                *out = (last << (8 - bits)) | (part >> (bits - 2));
+                ++out;
+
+                bits -= 2;
+            } else {
+                bits += 6;
+            }
+
+            last = part;
+        }
+
+        // check padding
+        if (behavior != decoding_behavior::loose) {
+            while (in_begin != in_end) {
+                auto c = *in_begin;
+                ++in_begin;
+
+                if (c != '=') {
+                    throw base64_error("invalid base64 character.");
+                }
+            }
+        }
+
+        return out;
+    }
+    /**
+     Decodes a string.
+
+     @param str the base64 encoded string
+     @param alphabet which alphabet should be used
+     @param behavior the behavior when an error was detected
+     @returns the decoded string
+     @throws see base64::decode()
+    */
+    static std::string decode(const std::string& str, alphabet alphabet = alphabet::auto_,
+                              decoding_behavior behavior = decoding_behavior::moderate)
+    {
+        std::string result;
+
+        result.reserve(max_decode_size(str.length()));
+
+        decode(str.begin(), str.end(), std::back_inserter(result), alphabet, behavior);
+
+        return result;
+    }
+    /**
+     Decodes a string.
+
+     @param buffer the base64 encoded buffer
+     @param size the size of the buffer
+     @param alphabet which alphabet should be used
+     @param behavior the behavior when an error was detected
+     @returns the decoded string
+     @throws see base64::decode()
+    */
+    static std::string decode(const char* buffer, std::size_t size, alphabet alphabet = alphabet::auto_,
+                              decoding_behavior behavior = decoding_behavior::moderate)
+    {
+        std::string result;
+
+        result.reserve(max_decode_size(size));
+
+        decode(buffer, buffer + size, std::back_inserter(result), alphabet, behavior);
+
+        return result;
+    }
+    /**
+     Decodes a string inplace.
+
+     @param[in,out] str the base64 encoded string
+     @param alphabet which alphabet should be used
+     @param behavior the behavior when an error was detected
+     @throws base64::decode_inplace()
+    */
+    static void decode_inplace(std::string& str, alphabet alphabet = alphabet::auto_,
+                               decoding_behavior behavior = decoding_behavior::moderate)
+    {
+        str.resize(decode(str.begin(), str.end(), str.begin(), alphabet, behavior) - str.begin());
+    }
+    /**
+     Decodes a char array inplace.
+
+     @param[in,out] str the string array
+     @param size the length of the array
+     @param alphabet which alphabet should be used
+     @param behavior the behavior when an error was detected
+     @returns the pointer to the next element past the last element decoded
+     @throws base64::decode_inplace()
+    */
+    static char* decode_inplace(char* str, std::size_t size, alphabet alphabet = alphabet::auto_,
+                                decoding_behavior behavior = decoding_behavior::moderate)
+    {
+        return decode(str, str + size, str, alphabet, behavior);
+    }
+    /**
+     Returns the required decoding size for a given size. The value is calculated with the following formula:
+
+     $$
+     \lceil \frac{size}{4} \rceil \cdot 3
+     $$
+
+     @param size the size of the encoded input
+     @returns the size of the resulting decoded buffer; this is the absolute maximum
+    */
+    static std::size_t max_decode_size(std::size_t size) noexcept
+    {
+        return (size / 4 + (size % 4 ? 1 : 0)) * 3;
+    }
+    /**
+     Returns the required encoding size for a given size. The value is calculated with the following formula:
+
+     $$
+     \lceil \frac{size}{3} \rceil \cdot 4
+     $$
+
+     @param size the size of the decoded input
+     @returns the size of the resulting encoded buffer
+    */
+    static std::size_t required_encode_size(std::size_t size) noexcept
+    {
+        return (size / 3 + (size % 3 ? 1 : 0)) * 4;
+    }
+
+private:
+    static std::uint8_t _base64_value(alphabet& alphabet, char c)
+    {
+        if (c >= 'A' && c <= 'Z') {
+            return c - 'A';
+        } else if (c >= 'a' && c <= 'z') {
+            return c - 'a' + 26;
+        } else if (c >= '0' && c <= '9') {
+            return c - '0' + 52;
+        }
+
+        // comes down to alphabet
+        if (alphabet == alphabet::standard) {
+            if (c == '+') {
+                return 62;
+            } else if (c == '/') {
+                return 63;
+            }
+        } else if (alphabet == alphabet::url_filename_safe) {
+            if (c == '-') {
+                return 62;
+            } else if (c == '_') {
+                return 63;
+            }
+        } // auto detect
+        else {
+            if (c == '+') {
+                alphabet = alphabet::standard;
+
+                return 62;
+            } else if (c == '/') {
+                alphabet = alphabet::standard;
+
+                return 63;
+            } else if (c == '-') {
+                alphabet = alphabet::url_filename_safe;
+
+                return 62;
+            } else if (c == '_') {
+                alphabet = alphabet::url_filename_safe;
+
+                return 63;
+            }
+        }
+
+        throw base64_error("invalid base64 character.");
+    }
+};
+
+#endif // !PUBLIC_DOMAIN_BASE64_HPP_
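
As a quick sanity check of the header above, here is a minimal, hypothetical round-trip sketch; the main() harness and the printed value are illustrative and not part of the commit, and it only assumes base64.hpp is on the include path:

    #include <cassert>
    #include <iostream>
    #include <string>

    #include "base64.hpp"

    int main() {
        const std::string plain = "hello world";
        const std::string enc = base64::encode(plain);   // standard alphabet, '=' padding
        const std::string dec = base64::decode(enc);     // alphabet is auto-detected
        assert(dec == plain);
        std::cout << enc << "\n";                        // aGVsbG8gd29ybGQ=
    }
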
diff --git a/llama/llama.cpp/common/common.cpp b/llama/llama.cpp/common/common.cpp
new file mode 100644
index 0000000..2b1d8da
--- /dev/null
+++ b/llama/llama.cpp/common/common.cpp
@@ -0,0 +1,1584 @@
+#if defined(_MSC_VER)
+#define _SILENCE_CXX17_CODECVT_HEADER_DEPRECATION_WARNING
+#endif
+
+#include "ggml.h"
+#include "gguf.h"
+
+#include "common.h"
+#include "log.h"
+#include "llama.h"
+
+#include <algorithm>
+#include <cinttypes>
+#include <climits>
+#include <cmath>
+#include <codecvt>
+#include <cstdarg>
+#include <cstring>
+#include <ctime>
+#include <filesystem>
+#include <fstream>
+#include <iostream>
+#include <iterator>
+#include <regex>
+#include <sstream>
+#include <string>
+#include <thread>
+#include <unordered_map>
+#include <unordered_set>
+#include <vector>
+
+#if defined(__APPLE__) && defined(__MACH__)
+#include <sys/types.h>
+#include <sys/sysctl.h>
+#endif
+
+#if defined(_WIN32)
+#define WIN32_LEAN_AND_MEAN
+#ifndef NOMINMAX
+#   define NOMINMAX
+#endif
+#include <locale>
+#include <windows.h>
+#include <fcntl.h>
+#include <io.h>
+#else
+#include <sys/ioctl.h>
+#include <sys/stat.h>
+#include <unistd.h>
+#endif
+
+#if defined(_MSC_VER)
+#pragma warning(disable: 4244 4267) // possible loss of data
+#endif
+
+//
+// CPU utils
+//
+
+int32_t cpu_get_num_physical_cores() {
+#ifdef __linux__
+    // enumerate the set of thread siblings, num entries is num cores
+    std::unordered_set<std::string> siblings;
+    for (uint32_t cpu=0; cpu < UINT32_MAX; ++cpu) {
+        std::ifstream thread_siblings("/sys/devices/system/cpu/cpu"
+            + std::to_string(cpu) + "/topology/thread_siblings");
+        if (!thread_siblings.is_open()) {
+            break; // no more cpus
+        }
+        std::string line;
+        if (std::getline(thread_siblings, line)) {
+            siblings.insert(line);
+        }
+    }
+    if (!siblings.empty()) {
+        return static_cast<int32_t>(siblings.size());
+    }
+#elif defined(__APPLE__) && defined(__MACH__)
+    int32_t num_physical_cores;
+    size_t len = sizeof(num_physical_cores);
+    int result = sysctlbyname("hw.perflevel0.physicalcpu", &num_physical_cores, &len, NULL, 0);
+    if (result == 0) {
+        return num_physical_cores;
+    }
+    result = sysctlbyname("hw.physicalcpu", &num_physical_cores, &len, NULL, 0);
+    if (result == 0) {
+        return num_physical_cores;
+    }
+#elif defined(_WIN32) && (_WIN32_WINNT >= 0x0601) && !defined(__MINGW64__) // windows 7 and later
+    // TODO: windows + arm64 + mingw64
+    unsigned int n_threads_win = std::thread::hardware_concurrency();
+    unsigned int default_threads = n_threads_win > 0 ? (n_threads_win <= 4 ? n_threads_win : n_threads_win / 2) : 4;
+
+    DWORD buffer_size = 0;
+    if (!GetLogicalProcessorInformationEx(RelationProcessorCore, nullptr, &buffer_size)) {
+        if (GetLastError() != ERROR_INSUFFICIENT_BUFFER) {
+            return default_threads;
+        }
+    }
+
+    std::vector<char> buffer(buffer_size);
+    if (!GetLogicalProcessorInformationEx(RelationProcessorCore, reinterpret_cast<PSYSTEM_LOGICAL_PROCESSOR_INFORMATION_EX>(buffer.data()), &buffer_size)) {
+        return default_threads;
+    }
+
+    int32_t num_physical_cores = 0;
+    PSYSTEM_LOGICAL_PROCESSOR_INFORMATION_EX info = reinterpret_cast<PSYSTEM_LOGICAL_PROCESSOR_INFORMATION_EX>(buffer.data());
+    while (buffer_size > 0) {
+        if (info->Relationship == RelationProcessorCore) {
+            num_physical_cores += info->Processor.GroupCount;
+        }
+        buffer_size -= info->Size;
+        info = reinterpret_cast<PSYSTEM_LOGICAL_PROCESSOR_INFORMATION_EX>(reinterpret_cast<char *>(info) + info->Size);
+    }
+
+    return num_physical_cores > 0 ? num_physical_cores : default_threads;
+#endif
+    unsigned int n_threads = std::thread::hardware_concurrency();
+    return n_threads > 0 ? (n_threads <= 4 ? n_threads : n_threads / 2) : 4;
+}
+
+#if defined(__x86_64__) && defined(__linux__) && !defined(__ANDROID__)
+#include <pthread.h>
+
+static void cpuid(unsigned leaf, unsigned subleaf,
+                  unsigned *eax, unsigned *ebx, unsigned *ecx, unsigned *edx) {
+    __asm__("movq\t%%rbx,%%rsi\n\t"
+            "cpuid\n\t"
+            "xchgq\t%%rbx,%%rsi"
+            : "=a"(*eax), "=S"(*ebx), "=c"(*ecx), "=d"(*edx)
+            : "0"(leaf), "2"(subleaf));
+}
+
+static int pin_cpu(int cpu) {
+    cpu_set_t mask;
+    CPU_ZERO(&mask);
+    CPU_SET(cpu, &mask);
+    return pthread_setaffinity_np(pthread_self(), sizeof(mask), &mask);
+}
+
+static bool is_hybrid_cpu(void) {
+    unsigned eax, ebx, ecx, edx;
+    cpuid(7, 0, &eax, &ebx, &ecx, &edx);
+    return !!(edx & (1u << 15));
+}
+
+static bool is_running_on_efficiency_core(void) {
+    unsigned eax, ebx, ecx, edx;
+    cpuid(0x1a, 0, &eax, &ebx, &ecx, &edx);
+    int intel_atom = 0x20;
+    int core_type = (eax & 0xff000000u) >> 24;
+    return core_type == intel_atom;
+}
+
+static int cpu_count_math_cpus(int n_cpu) {
+    int result = 0;
+    for (int cpu = 0; cpu < n_cpu; ++cpu) {
+        if (pin_cpu(cpu)) {
+            return -1;
+        }
+        if (is_running_on_efficiency_core()) {
+            continue; // efficiency cores harm lockstep threading
+        }
+        ++cpu; // hyperthreading isn't useful for linear algebra
+        ++result;
+    }
+    return result;
+}
+
+#endif // __x86_64__ && __linux__
+
+/**
+ * Returns number of CPUs on system that are useful for math.
+ */
+int32_t cpu_get_num_math() {
+#if defined(__x86_64__) && defined(__linux__) && !defined(__ANDROID__)
+    int n_cpu = sysconf(_SC_NPROCESSORS_ONLN);
+    if (n_cpu < 1) {
+        return cpu_get_num_physical_cores();
+    }
+    if (is_hybrid_cpu()) {
+        cpu_set_t affinity;
+        if (!pthread_getaffinity_np(pthread_self(), sizeof(affinity), &affinity)) {
+            int result = cpu_count_math_cpus(n_cpu);
+            pthread_setaffinity_np(pthread_self(), sizeof(affinity), &affinity);
+            if (result > 0) {
+                return result;
+            }
+        }
+    }
+#endif
+    return cpu_get_num_physical_cores();
+}
+
+// Helper for setting process priority
+
+#if defined(_WIN32)
+
+bool set_process_priority(enum ggml_sched_priority prio) {
+    if (prio == GGML_SCHED_PRIO_NORMAL) {
+        return true;
+    }
+
+    DWORD p = NORMAL_PRIORITY_CLASS;
+    switch (prio) {
+        case GGML_SCHED_PRIO_NORMAL:   p = NORMAL_PRIORITY_CLASS;       break;
+        case GGML_SCHED_PRIO_MEDIUM:   p = ABOVE_NORMAL_PRIORITY_CLASS; break;
+        case GGML_SCHED_PRIO_HIGH:     p = HIGH_PRIORITY_CLASS;         break;
+        case GGML_SCHED_PRIO_REALTIME: p = REALTIME_PRIORITY_CLASS;     break;
+    }
+
+    if (!SetPriorityClass(GetCurrentProcess(), p)) {
+        LOG_WRN("failed to set process priority class %d : (%d)\n", prio, (int) GetLastError());
+        return false;
+    }
+
+    return true;
+}
+
+#else // MacOS and POSIX
+#include <sys/types.h>
+#include <sys/resource.h>
+
+bool set_process_priority(enum ggml_sched_priority prio) {
+    if (prio == GGML_SCHED_PRIO_NORMAL) {
+        return true;
+    }
+
+    int p = 0;
+    switch (prio) {
+        case GGML_SCHED_PRIO_NORMAL:   p =  0;  break;
+        case GGML_SCHED_PRIO_MEDIUM:   p = -5;  break;
+        case GGML_SCHED_PRIO_HIGH:     p = -10; break;
+        case GGML_SCHED_PRIO_REALTIME: p = -20; break;
+    }
+
+    if (!setpriority(PRIO_PROCESS, 0, p)) {
+        LOG_WRN("failed to set process priority %d : %s (%d)\n", prio, strerror(errno), errno);
+        return false;
+    }
+    return true;
+}
+
+#endif
+
+//
+// CLI argument parsing
+//
+
+
+void postprocess_cpu_params(cpu_params& cpuparams, const cpu_params* role_model) {
+    int32_t n_set = 0;
+
+    if (cpuparams.n_threads < 0) {
+        // Assuming everything about cpuparams is invalid
+        if (role_model != nullptr) {
+            cpuparams = *role_model;
+        } else {
+            cpuparams.n_threads = cpu_get_num_math();
+        }
+    }
+
+    for (int32_t i = 0; i < GGML_MAX_N_THREADS; i++) {
+        if (cpuparams.cpumask[i]) {
+            n_set++;
+        }
+    }
+
+    if (n_set && n_set < cpuparams.n_threads) {
+        // Not enough set bits, may experience performance issues.
+        LOG_WRN("Not enough set bits in CPU mask (%d) to satisfy requested thread count: %d\n", n_set, cpuparams.n_threads);
+    }
+}
+
+bool parse_cpu_range(const std::string & range, bool (&boolmask)[GGML_MAX_N_THREADS]) {
+    size_t dash_loc = range.find('-');
+    if (dash_loc == std::string::npos) {
+        LOG_ERR("Format of CPU range is invalid! Expected []-[].\n");
+        return false;
+    }
+
+    size_t start_i;
+    size_t end_i;
+
+    if (dash_loc == 0) {
+        start_i = 0;
+    } else {
+        start_i = std::stoull(range.substr(0, dash_loc));
+        if (start_i >= GGML_MAX_N_THREADS) {
+            LOG_ERR("Start index out of bounds!\n");
+            return false;
+        }
+    }
+
+    if (dash_loc == range.length() - 1) {
+        end_i = GGML_MAX_N_THREADS - 1;
+    } else {
+        end_i = std::stoull(range.substr(dash_loc + 1));
+        if (end_i >= GGML_MAX_N_THREADS) {
+            LOG_ERR("End index out of bounds!\n");
+            return false;
+        }
+    }
+
+    for (size_t i = start_i; i <= end_i; i++) {
+        boolmask[i] = true;
+    }
+
+    return true;
+}
+
+bool parse_cpu_mask(const std::string & mask, bool (&boolmask)[GGML_MAX_N_THREADS]) {
+    // Discard potential 0x prefix
+    size_t start_i = 0;
+    if (mask.length() >= 2 && mask.substr(0, 2) == "0x") {
+        start_i = 2;
+    }
+
+    size_t num_digits = mask.length() - start_i;
+    if (num_digits > 128) num_digits = 128;
+
+    size_t end_i = num_digits + start_i;
+
+    for (size_t i = start_i, n = (num_digits*4 - 1); i < end_i; i++, n-=4) {
+        char c = mask.at(i);
+        int8_t id = c;
+
+        if ((c >= '0' && c <= '9')) {
+            id -= '0';
+        } else if (c >= 'a' && c <= 'f') {
+            id -= 'a' - 10;
+        } else if (c >= 'A' && c <= 'F') {
+            id -= 'A' - 10;
+        } else {
+            LOG_ERR("Invalid hex character '%c' at position %d\n", c, int32_t(i));
+            return false;
+        }
+
+        boolmask[  n  ] = boolmask[  n  ] || ((id & 8) != 0);
+        boolmask[n - 1] = boolmask[n - 1] || ((id & 4) != 0);
+        boolmask[n - 2] = boolmask[n - 2] || ((id & 2) != 0);
+        boolmask[n - 3] = boolmask[n - 3] || ((id & 1) != 0);
+    }
+
+    return true;
+}
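
For orientation, a small hypothetical driver showing how a hex mask fans out into the per-thread boolean mask; it assumes common.h declares parse_cpu_mask and that GGML_MAX_N_THREADS comes in via ggml.h:

    #include <cstdio>

    #include "common.h"

    int main() {
        bool mask[GGML_MAX_N_THREADS] = { false };
        // "0xF5" = 0b11110101 -> CPUs 0, 2, 4, 5, 6, 7 enabled
        if (parse_cpu_mask("0xF5", mask)) {
            for (int i = 0; i < 8; i++) {
                printf("cpu %d: %s\n", i, mask[i] ? "on" : "off");
            }
        }
    }
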
+
+void common_init() {
+    llama_log_set([](ggml_log_level level, const char * text, void * /*user_data*/) {
+        if (LOG_DEFAULT_LLAMA <= common_log_verbosity_thold) {
+            common_log_add(common_log_main(), level, "%s", text);
+        }
+    }, NULL);
+
+#ifdef NDEBUG
+    const char * build_type = "";
+#else
+    const char * build_type = " (debug)";
+#endif
+
+    LOG_INF("build: %d (%s) with %s for %s%s\n", LLAMA_BUILD_NUMBER, LLAMA_COMMIT, LLAMA_COMPILER, LLAMA_BUILD_TARGET, build_type);
+}
+
+std::string common_params_get_system_info(const common_params & params) {
+    std::ostringstream os;
+
+    os << "system_info: n_threads = " << params.cpuparams.n_threads;
+    if (params.cpuparams_batch.n_threads != -1) {
+        os << " (n_threads_batch = " << params.cpuparams_batch.n_threads << ")";
+    }
+#if defined(_WIN32) && (_WIN32_WINNT >= 0x0601) && !defined(__MINGW64__) // windows 7 and later
+    // TODO: windows + arm64 + mingw64
+    DWORD logicalProcessorCount = GetActiveProcessorCount(ALL_PROCESSOR_GROUPS);
+    os << " / " << logicalProcessorCount << " | " << llama_print_system_info();
+#else
+    os << " / " << std::thread::hardware_concurrency() << " | " << llama_print_system_info();
+#endif
+
+    return os.str();
+}
+
+//
+// String utils
+//
+
+std::string string_format(const char * fmt, ...) {
+    va_list ap;
+    va_list ap2;
+    va_start(ap, fmt);
+    va_copy(ap2, ap);
+    int size = vsnprintf(NULL, 0, fmt, ap);
+    GGML_ASSERT(size >= 0 && size < INT_MAX); // NOLINT
+    std::vector<char> buf(size + 1);
+    int size2 = vsnprintf(buf.data(), size + 1, fmt, ap2);
+    GGML_ASSERT(size2 == size);
+    va_end(ap2);
+    va_end(ap);
+    return std::string(buf.data(), size);
+}
+
+std::string string_strip(const std::string & str) {
+    size_t start = 0;
+    size_t end = str.size();
+    while (start < end && std::isspace(str[start])) {
+        start++;
+    }
+    while (end > start && std::isspace(str[end - 1])) {
+        end--;
+    }
+    return str.substr(start, end - start);
+}
+
+std::string string_get_sortable_timestamp() {
+    using clock = std::chrono::system_clock;
+
+    const clock::time_point current_time = clock::now();
+    const time_t as_time_t = clock::to_time_t(current_time);
+    char timestamp_no_ns[100];
+    std::strftime(timestamp_no_ns, 100, "%Y_%m_%d-%H_%M_%S", std::localtime(&as_time_t));
+
+    const int64_t ns = std::chrono::duration_cast<std::chrono::nanoseconds>(
+        current_time.time_since_epoch() % 1000000000).count();
+    char timestamp_ns[11];
+    snprintf(timestamp_ns, 11, "%09" PRId64, ns);
+
+    return std::string(timestamp_no_ns) + "." + std::string(timestamp_ns);
+}
+
+void string_replace_all(std::string & s, const std::string & search, const std::string & replace) {
+    if (search.empty()) {
+        return;
+    }
+    std::string builder;
+    builder.reserve(s.length());
+    size_t pos = 0;
+    size_t last_pos = 0;
+    while ((pos = s.find(search, last_pos)) != std::string::npos) {
+        builder.append(s, last_pos, pos - last_pos);
+        builder.append(replace);
+        last_pos = pos + search.length();
+    }
+    builder.append(s, last_pos, std::string::npos);
+    s = std::move(builder);
+}
+
+std::string regex_escape(const std::string & s) {
+    static const std::regex special_chars("[.^$|()*+?\\[\\]{}\\\\]");
+    return std::regex_replace(s, special_chars, "\\$0");
+}
+
+std::string string_join(const std::vector<std::string> & values, const std::string & separator) {
+    std::ostringstream result;
+    for (size_t i = 0; i < values.size(); ++i) {
+        if (i > 0) {
+            result << separator;
+        }
+        result << values[i];
+    }
+    return result.str();
+}
+
+std::vector<std::string> string_split(const std::string & str, const std::string & delimiter) {
+    std::vector<std::string> parts;
+    size_t start = 0;
+    size_t end = str.find(delimiter);
+
+    while (end != std::string::npos) {
+        parts.push_back(str.substr(start, end - start));
+        start = end + delimiter.length();
+        end = str.find(delimiter, start);
+    }
+
+    parts.push_back(str.substr(start));
+
+    return parts;
+}
+
+std::string string_repeat(const std::string & str, size_t n) {
+    if (n == 0) {
+        return "";
+    }
+
+    std::string result;
+    result.reserve(str.length() * n);
+
+    for (size_t i = 0; i < n; ++i) {
+        result += str;
+    }
+
+    return result;
+}
+
+std::string string_from(bool value) {
+    return value ? "true" : "false";
+}
+
+std::string string_from(const std::vector<int> & values) {
+    std::stringstream buf;
+
+    buf << "[ ";
+    bool first = true;
+    for (auto e : values) {
+        if (first) {
+            first = false;
+        } else {
+            buf << ", ";
+        }
+        buf << std::to_string(e);
+    }
+    buf << " ]";
+
+    return buf.str();
+}
+
+std::string string_from(const struct llama_context * ctx, const std::vector<llama_token> & tokens) {
+    std::stringstream buf;
+
+    buf << "[ ";
+
+    bool first = true;
+    for (const auto & token : tokens) {
+        if (!first) {
+            buf << ", ";
+        } else {
+            first = false;
+        }
+
+        auto detokenized = common_token_to_piece(ctx, token);
+
+        detokenized.erase(
+            std::remove_if(
+                detokenized.begin(),
+                detokenized.end(),
+                [](const unsigned char c) { return !std::isprint(c); }),
+            detokenized.end());
+
+        buf << "'" << detokenized << "'"
+            << ":" << std::to_string(token);
+    }
+
+    buf << " ]";
+
+    return buf.str();
+}
+
+std::string string_from(const struct llama_context * ctx, const struct llama_batch & batch) {
+    std::stringstream buf;
+
+    buf << "[ ";
+
+    bool first = true;
+    for (int i = 0; i < batch.n_tokens; ++i) {
+        if (!first) {
+            buf << ", ";
+        } else {
+            first = false;
+        }
+
+        auto detokenized = common_token_to_piece(ctx, batch.token[i]);
+
+        detokenized.erase(
+                std::remove_if(
+                    detokenized.begin(),
+                    detokenized.end(),
+                    [](const unsigned char c) { return !std::isprint(c); }),
+                detokenized.end());
+
+        buf << "\n"          << std::to_string(i)
+            << ", token '"   << detokenized << "'"
+            << ", pos "      << std::to_string(batch.pos[i])
+            << ", n_seq_id " << std::to_string(batch.n_seq_id[i])
+            << ", seq_id "   << std::to_string(batch.seq_id[i][0])
+            << ", logits "   << std::to_string(batch.logits[i]);
+    }
+
+    buf << " ]";
+
+    return buf.str();
+}
+
+void string_process_escapes(std::string & input) {
+    std::size_t input_len = input.length();
+    std::size_t output_idx = 0;
+
+    for (std::size_t input_idx = 0; input_idx < input_len; ++input_idx) {
+        if (input[input_idx] == '\\' && input_idx + 1 < input_len) {
+            switch (input[++input_idx]) {
+                case 'n':  input[output_idx++] = '\n'; break;
+                case 'r':  input[output_idx++] = '\r'; break;
+                case 't':  input[output_idx++] = '\t'; break;
+                case '\'': input[output_idx++] = '\''; break;
+                case '\"': input[output_idx++] = '\"'; break;
+                case '\\': input[output_idx++] = '\\'; break;
+                case 'x':
+                    // Handle \x12, etc
+                    if (input_idx + 2 < input_len) {
+                        const char x[3] = { input[input_idx + 1], input[input_idx + 2], 0 };
+                        char *err_p = nullptr;
+                        const long val = std::strtol(x, &err_p, 16);
+                        if (err_p == x + 2) {
+                            input_idx += 2;
+                            input[output_idx++] = char(val);
+                            break;
+                        }
+                    }
+                    // fall through
+                default:   input[output_idx++] = '\\';
+                           input[output_idx++] = input[input_idx]; break;
+            }
+        } else {
+            input[output_idx++] = input[input_idx];
+        }
+    }
+
+    input.resize(output_idx);
+}
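
A short, hypothetical usage note for the in-place escape handling above (assumes the declaration lives in common.h):

    #include <cstdio>
    #include <string>

    #include "common.h"

    int main() {
        std::string s = "line1\\nline2\\x21";   // literal backslash sequences, e.g. from a CLI flag
        string_process_escapes(s);
        printf("%s\n", s.c_str());              // prints "line1", a newline, then "line2!"
    }
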
+
+bool string_parse_kv_override(const char * data, std::vector<llama_model_kv_override> & overrides) {
+    const char * sep = strchr(data, '=');
+    if (sep == nullptr || sep - data >= 128) {
+        LOG_ERR("%s: malformed KV override '%s'\n", __func__, data);
+        return false;
+    }
+    llama_model_kv_override kvo;
+    std::strncpy(kvo.key, data, sep - data);
+    kvo.key[sep - data] = 0;
+    sep++;
+    if (strncmp(sep, "int:", 4) == 0) {
+        sep += 4;
+        kvo.tag = LLAMA_KV_OVERRIDE_TYPE_INT;
+        kvo.val_i64 = std::atol(sep);
+    } else if (strncmp(sep, "float:", 6) == 0) {
+        sep += 6;
+        kvo.tag = LLAMA_KV_OVERRIDE_TYPE_FLOAT;
+        kvo.val_f64 = std::atof(sep);
+    } else if (strncmp(sep, "bool:", 5) == 0) {
+        sep += 5;
+        kvo.tag = LLAMA_KV_OVERRIDE_TYPE_BOOL;
+        if (std::strcmp(sep, "true") == 0) {
+            kvo.val_bool = true;
+        } else if (std::strcmp(sep, "false") == 0) {
+            kvo.val_bool = false;
+        } else {
+            LOG_ERR("%s: invalid boolean value for KV override '%s'\n", __func__, data);
+            return false;
+        }
+    } else if (strncmp(sep, "str:", 4) == 0) {
+        sep += 4;
+        kvo.tag = LLAMA_KV_OVERRIDE_TYPE_STR;
+        if (strlen(sep) > 127) {
+            LOG_ERR("%s: malformed KV override '%s', value cannot exceed 127 chars\n", __func__, data);
+            return false;
+        }
+        strncpy(kvo.val_str, sep, 127);
+        kvo.val_str[127] = '\0';
+    } else {
+        LOG_ERR("%s: invalid type for KV override '%s'\n", __func__, data);
+        return false;
+    }
+    overrides.emplace_back(std::move(kvo));
+    return true;
+}
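
The accepted syntax is key=type:value with int, float, bool, and str tags; a hypothetical sketch follows (key names are illustrative, and it assumes common.h declares the function and pulls in llama_model_kv_override from llama.h):

    #include <vector>

    #include "common.h"

    int main() {
        std::vector<llama_model_kv_override> overrides;
        string_parse_kv_override("tokenizer.ggml.add_bos_token=bool:false", overrides);
        string_parse_kv_override("general.name=str:my-model", overrides);
        string_parse_kv_override("some.int.key=int:42", overrides);
        return overrides.size() == 3 ? 0 : 1;
    }
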
+
+//
+// Filesystem utils
+//
+
+// Validate if a filename is safe to use
+// To validate a full path, split the path by the OS-specific path separator, and validate each part with this function
+bool fs_validate_filename(const std::string & filename) {
+    if (!filename.length()) {
+        // Empty filename invalid
+        return false;
+    }
+    if (filename.length() > 255) {
+        // Limit at common largest possible filename on Linux filesystems
+        // to avoid unnecessary further validation
+        // (On systems with smaller limits it will be caught by the OS)
+        return false;
+    }
+
+    std::u32string filename_utf32;
+    try {
+#if defined(__clang__)
+        // disable C++17 deprecation warning for std::codecvt_utf8
+#    pragma clang diagnostic push
+#    pragma clang diagnostic ignored "-Wdeprecated-declarations"
+#endif
+        std::wstring_convert<std::codecvt_utf8<char32_t>, char32_t> converter;
+
+#if defined(__clang__)
+#    pragma clang diagnostic pop
+#endif
+
+        filename_utf32 = converter.from_bytes(filename);
+
+        // If the reverse conversion mismatches, it means overlong UTF-8 sequences were used,
+        // or invalid encodings were encountered. Reject such attempts
+        std::string filename_reencoded = converter.to_bytes(filename_utf32);
+        if (filename_reencoded != filename) {
+            return false;
+        }
+    } catch (const std::exception &) {
+        return false;
+    }
+
+    // Check for forbidden codepoints:
+    // - Control characters
+    // - Unicode equivalents of illegal characters
+    // - UTF-16 surrogate pairs
+    // - UTF-8 replacement character
+    // - Byte order mark (BOM)
+    // - Illegal characters: / \ : * ? " < > |
+    for (char32_t c : filename_utf32) {
+        if (c <= 0x1F // Control characters (C0)
+            || c == 0x7F // Control characters (DEL)
+            || (c >= 0x80 && c <= 0x9F) // Control characters (C1)
+            || c == 0xFF0E // Fullwidth Full Stop (period equivalent)
+            || c == 0x2215 // Division Slash (forward slash equivalent)
+            || c == 0x2216 // Set Minus (backslash equivalent)
+            || (c >= 0xD800 && c <= 0xDFFF) // UTF-16 surrogate pairs
+            || c == 0xFFFD // Replacement Character (UTF-8)
+            || c == 0xFEFF // Byte Order Mark (BOM)
+            || c == '/' || c == '\\' || c == ':' || c == '*' // Illegal characters
+            || c == '?' || c == '"' || c == '<' || c == '>' || c == '|') {
+            return false;
+        }
+    }
+
+    // Reject any leading or trailing ' ', or any trailing '.', these are stripped on Windows and will cause a different filename
+    // Unicode and other whitespace is not affected, only 0x20 space
+    if (filename.front() == ' ' || filename.back() == ' ' || filename.back() == '.') {
+        return false;
+    }
+
+    // Reject any ".." (currently stricter than necessary, it should be fine to just check for == ".." instead)
+    if (filename.find("..") != std::string::npos) {
+        return false;
+    }
+
+    // Reject "."
+    if (filename == ".") {
+        return false;
+    }
+
+    return true;
+}
+
+// returns true if successful, false otherwise
+bool fs_create_directory_with_parents(const std::string & path) {
+#ifdef _WIN32
+    std::wstring_convert<std::codecvt_utf8<wchar_t>> converter;
+    std::wstring wpath = converter.from_bytes(path);
+
+    // if the path already exists, check whether it's a directory
+    const DWORD attributes = GetFileAttributesW(wpath.c_str());
+    if ((attributes != INVALID_FILE_ATTRIBUTES) && (attributes & FILE_ATTRIBUTE_DIRECTORY)) {
+        return true;
+    }
+
+    size_t pos_slash = 0;
+
+    // process path from front to back, procedurally creating directories
+    while ((pos_slash = path.find('\\', pos_slash)) != std::string::npos) {
+        const std::wstring subpath = wpath.substr(0, pos_slash);
+        const wchar_t * test = subpath.c_str();
+
+        const bool success = CreateDirectoryW(test, NULL);
+        if (!success) {
+            const DWORD error = GetLastError();
+
+            // if the path already exists, ensure that it's a directory
+            if (error == ERROR_ALREADY_EXISTS) {
+                const DWORD attributes = GetFileAttributesW(subpath.c_str());
+                if (attributes == INVALID_FILE_ATTRIBUTES || !(attributes & FILE_ATTRIBUTE_DIRECTORY)) {
+                    return false;
+                }
+            } else {
+                return false;
+            }
+        }
+
+        pos_slash += 1;
+    }
+
+    return true;
+#else
+    // if the path already exists, check whether it's a directory
+    struct stat info;
+    if (stat(path.c_str(), &info) == 0) {
+        return S_ISDIR(info.st_mode);
+    }
+
+    size_t pos_slash = 1; // skip leading slashes for directory creation
+
+    // process path from front to back, procedurally creating directories
+    while ((pos_slash = path.find('/', pos_slash)) != std::string::npos) {
+        const std::string subpath = path.substr(0, pos_slash);
+        struct stat info;
+
+        // if the path already exists, ensure that it's a directory
+        if (stat(subpath.c_str(), &info) == 0) {
+            if (!S_ISDIR(info.st_mode)) {
+                return false;
+            }
+        } else {
+            // create parent directories
+            const int ret = mkdir(subpath.c_str(), 0755);
+            if (ret != 0) {
+                return false;
+            }
+        }
+
+        pos_slash += 1;
+    }
+
+    return true;
+#endif // _WIN32
+}
+
+std::string fs_get_cache_directory() {
+    std::string cache_directory = "";
+    auto ensure_trailing_slash = [](std::string p) {
+        // Make sure to add trailing slash
+        if (p.back() != DIRECTORY_SEPARATOR) {
+            p += DIRECTORY_SEPARATOR;
+        }
+        return p;
+    };
+    if (getenv("LLAMA_CACHE")) {
+        cache_directory = std::getenv("LLAMA_CACHE");
+    } else {
+#if defined(__linux__) || defined(__FreeBSD__) || defined(_AIX)
+        if (std::getenv("XDG_CACHE_HOME")) {
+            cache_directory = std::getenv("XDG_CACHE_HOME");
+        } else {
+            cache_directory = std::getenv("HOME") + std::string("/.cache/");
+        }
+#elif defined(__APPLE__)
+        cache_directory = std::getenv("HOME") + std::string("/Library/Caches/");
+#elif defined(_WIN32)
+        cache_directory = std::getenv("LOCALAPPDATA");
+#else
+#  error Unknown architecture
+#endif
+        cache_directory = ensure_trailing_slash(cache_directory);
+        cache_directory += "llama.cpp";
+    }
+    return ensure_trailing_slash(cache_directory);
+}
+
+std::string fs_get_cache_file(const std::string & filename) {
+    GGML_ASSERT(filename.find(DIRECTORY_SEPARATOR) == std::string::npos);
+    std::string cache_directory = fs_get_cache_directory();
+    const bool success = fs_create_directory_with_parents(cache_directory);
+    if (!success) {
+        throw std::runtime_error("failed to create cache directory: " + cache_directory);
+    }
+    return cache_directory + filename;
+}
+
+
+//
+// Model utils
+//
+
+struct common_init_result common_init_from_params(common_params & params) {
+    common_init_result iparams;
+    auto mparams = common_model_params_to_llama(params);
+
+    llama_model * model = llama_model_load_from_file(params.model.path.c_str(), mparams);
+    if (model == NULL) {
+        LOG_ERR("%s: failed to load model '%s'\n", __func__, params.model.path.c_str());
+        return iparams;
+    }
+
+    const llama_vocab * vocab = llama_model_get_vocab(model);
+
+    if (params.reranking) {
+        bool ok = true;
+
+        if (llama_vocab_bos(vocab) == LLAMA_TOKEN_NULL) {
+            LOG_WRN("%s: warning: vocab does not have a  BOS token, reranking will not work\n", __func__);
+            ok = false;
+        }
+
+        if (llama_vocab_eos(vocab) == LLAMA_TOKEN_NULL) {
+            LOG_WRN("%s: warning: vocab does not have an EOS token, reranking will not work\n", __func__);
+            ok = false;
+        }
+
+        if (llama_vocab_sep(vocab) == LLAMA_TOKEN_NULL) {
+            LOG_WRN("%s: warning: vocab does not have a  SEP token, reranking will not work\n", __func__);
+            ok = false;
+        }
+
+        if (!ok) {
+            llama_model_free(model);
+
+            return iparams;
+        }
+    }
+
+    auto cparams = common_context_params_to_llama(params);
+
+    llama_context * lctx = llama_init_from_model(model, cparams);
+    if (lctx == NULL) {
+        LOG_ERR("%s: failed to create context with model '%s'\n", __func__, params.model.path.c_str());
+        llama_model_free(model);
+        return iparams;
+    }
+
+    if (params.ctx_shift && !llama_kv_self_can_shift(lctx)) {
+        LOG_WRN("%s: KV cache shifting is not supported for this context, disabling KV cache shifting\n", __func__);
+        params.ctx_shift = false;
+    }
+
+    if (!params.control_vectors.empty()) {
+        if (params.control_vector_layer_start <= 0) params.control_vector_layer_start = 1;
+        if (params.control_vector_layer_end   <= 0) params.control_vector_layer_end   = llama_model_n_layer(model);
+
+        const auto cvec = common_control_vector_load(params.control_vectors);
+        if (cvec.n_embd == -1) {
+            llama_free(lctx);
+            llama_model_free(model);
+
+            return iparams;
+        }
+
+        int err = llama_apply_adapter_cvec(
+                lctx,
+                cvec.data.data(),
+                cvec.data.size(),
+                cvec.n_embd,
+                params.control_vector_layer_start,
+                params.control_vector_layer_end);
+        if (err) {
+            llama_free(lctx);
+            llama_model_free(model);
+
+            return iparams;
+        }
+    }
+
+    // load and optionally apply lora adapters
+    for (auto & la : params.lora_adapters) {
+        llama_adapter_lora_ptr lora;
+        lora.reset(llama_adapter_lora_init(model, la.path.c_str()));
+        if (lora == nullptr) {
+            LOG_ERR("%s: failed to apply lora adapter '%s'\n", __func__, la.path.c_str());
+            llama_free(lctx);
+            llama_model_free(model);
+            return iparams;
+        }
+
+        la.ptr = lora.get();
+        iparams.lora.emplace_back(std::move(lora)); // copy to list of loaded adapters
+    }
+
+    if (!params.lora_init_without_apply) {
+        common_set_adapter_lora(lctx, params.lora_adapters);
+    }
+
+    if (params.sampling.ignore_eos && llama_vocab_eos(vocab) == LLAMA_TOKEN_NULL) {
+        LOG_WRN("%s: warning: vocab does not have an EOS token, ignoring --ignore-eos\n", __func__);
+        params.sampling.ignore_eos = false;
+    }
+
+    if (params.sampling.ignore_eos) {
+        for (llama_token i = 0; i < llama_vocab_n_tokens(vocab); i++) {
+            if (llama_vocab_is_eog(vocab, i)) {
+                LOG_INF("%s: added %s logit bias = %f\n", __func__, common_token_to_piece(lctx, i).c_str(), -INFINITY);
+                params.sampling.logit_bias.push_back({i, -INFINITY});
+            }
+        }
+    }
+
+    if (params.sampling.penalty_last_n == -1) {
+        LOG_INF("%s: setting penalty_last_n to ctx_size = %d\n", __func__, llama_n_ctx(lctx));
+        params.sampling.penalty_last_n = llama_n_ctx(lctx);
+    }
+
+    if (params.sampling.dry_penalty_last_n == -1) {
+        LOG_INF("%s: setting dry_penalty_last_n to ctx_size = %d\n", __func__, llama_n_ctx(lctx));
+        params.sampling.dry_penalty_last_n = llama_n_ctx(lctx);
+    }
+
+    if (params.warmup) {
+        LOG_WRN("%s: warming up the model with an empty run - please wait ... (--no-warmup to disable)\n", __func__);
+
+        llama_set_warmup(lctx, true);
+
+        std::vector<llama_token> tmp;
+        llama_token bos = llama_vocab_bos(vocab);
+        llama_token eos = llama_vocab_eos(vocab);
+
+        // some models (e.g. T5) don't have a BOS token
+        if (bos != LLAMA_TOKEN_NULL) {
+            tmp.push_back(bos);
+        }
+        if (eos != LLAMA_TOKEN_NULL) {
+            tmp.push_back(eos);
+        }
+        if (tmp.empty()) {
+            tmp.push_back(0);
+        }
+
+        if (llama_model_has_encoder(model)) {
+            llama_encode(lctx, llama_batch_get_one(tmp.data(), tmp.size()));
+            llama_token decoder_start_token_id = llama_model_decoder_start_token(model);
+            if (decoder_start_token_id == LLAMA_TOKEN_NULL) {
+                decoder_start_token_id = bos;
+            }
+            tmp.clear();
+            tmp.push_back(decoder_start_token_id);
+        }
+        if (llama_model_has_decoder(model)) {
+            llama_decode(lctx, llama_batch_get_one(tmp.data(), std::min(tmp.size(), (size_t) params.n_batch)));
+        }
+        llama_kv_self_clear(lctx);
+        llama_synchronize(lctx);
+        llama_perf_context_reset(lctx);
+        llama_set_warmup(lctx, false);
+    }
+
+    iparams.model.reset(model);
+    iparams.context.reset(lctx);
+
+    return iparams;
+}
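
A minimal, hypothetical loader sketch built on the helper above; the model path is a placeholder, and it assumes common.h exposes common_params and common_init_result with smart-pointer model/context members, with llama_backend_init/llama_backend_free coming from llama.h:

    #include "common.h"
    #include "llama.h"

    int main() {
        common_init();                                  // hook llama.cpp logging into common_log
        llama_backend_init();

        common_params params;
        params.model.path = "/path/to/model.gguf";      // placeholder path

        common_init_result res = common_init_from_params(params);
        if (!res.model || !res.context) {
            return 1;                                   // load failed; errors were logged above
        }
        // ... run inference against res.context.get() ...
        llama_backend_free();
        return 0;
    }
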
+
+std::string get_model_endpoint() {
+    const char * model_endpoint_env = getenv("MODEL_ENDPOINT");
+    // We still respect the use of environment-variable "HF_ENDPOINT" for backward-compatibility.
+    const char * hf_endpoint_env = getenv("HF_ENDPOINT");
+    const char * endpoint_env = model_endpoint_env ? model_endpoint_env : hf_endpoint_env;
+    std::string model_endpoint = "https://huggingface.co/";
+    if (endpoint_env) {
+        model_endpoint = endpoint_env;
+        if (model_endpoint.back() != '/') model_endpoint += '/';
+    }
+    return model_endpoint;
+}
+
+void common_set_adapter_lora(struct llama_context * ctx, std::vector & lora) {
+    llama_clear_adapter_lora(ctx);
+    for (auto & la : lora) {
+        if (la.scale != 0.0f) {
+            llama_set_adapter_lora(ctx, la.ptr, la.scale);
+        }
+    }
+}
+
+struct llama_model_params common_model_params_to_llama(common_params & params) {
+    auto mparams = llama_model_default_params();
+
+    if (!params.devices.empty()) {
+        mparams.devices = params.devices.data();
+    }
+
+    if (params.n_gpu_layers != -1) {
+        mparams.n_gpu_layers = params.n_gpu_layers;
+    }
+
+    mparams.main_gpu        = params.main_gpu;
+    mparams.split_mode      = params.split_mode;
+    mparams.tensor_split    = params.tensor_split;
+    mparams.use_mmap        = params.use_mmap;
+    mparams.use_mlock       = params.use_mlock;
+    mparams.check_tensors   = params.check_tensors;
+
+    if (params.kv_overrides.empty()) {
+        mparams.kv_overrides = NULL;
+    } else {
+        GGML_ASSERT(params.kv_overrides.back().key[0] == 0 && "KV overrides not terminated with empty key");
+        mparams.kv_overrides = params.kv_overrides.data();
+    }
+
+    if (params.tensor_buft_overrides.empty()) {
+        mparams.tensor_buft_overrides = NULL;
+    } else {
+        GGML_ASSERT(params.tensor_buft_overrides.back().pattern == nullptr && "Tensor buffer overrides not terminated with empty pattern");
+        mparams.tensor_buft_overrides = params.tensor_buft_overrides.data();
+    }
+
+    return mparams;
+}
+
+struct llama_context_params common_context_params_to_llama(const common_params & params) {
+    auto cparams = llama_context_default_params();
+
+    cparams.n_ctx             = params.n_ctx;
+    cparams.n_seq_max         = params.n_parallel;
+    cparams.n_batch           = params.n_batch;
+    cparams.n_ubatch          = params.n_ubatch;
+    cparams.n_threads         = params.cpuparams.n_threads;
+    cparams.n_threads_batch   = params.cpuparams_batch.n_threads == -1 ?
+                                params.cpuparams.n_threads : params.cpuparams_batch.n_threads;
+    cparams.embeddings        = params.embedding;
+    cparams.rope_scaling_type = params.rope_scaling_type;
+    cparams.rope_freq_base    = params.rope_freq_base;
+    cparams.rope_freq_scale   = params.rope_freq_scale;
+    cparams.yarn_ext_factor   = params.yarn_ext_factor;
+    cparams.yarn_attn_factor  = params.yarn_attn_factor;
+    cparams.yarn_beta_fast    = params.yarn_beta_fast;
+    cparams.yarn_beta_slow    = params.yarn_beta_slow;
+    cparams.yarn_orig_ctx     = params.yarn_orig_ctx;
+    cparams.pooling_type      = params.pooling_type;
+    cparams.attention_type    = params.attention_type;
+    cparams.defrag_thold      = params.defrag_thold;
+    cparams.cb_eval           = params.cb_eval;
+    cparams.cb_eval_user_data = params.cb_eval_user_data;
+    cparams.offload_kqv       = !params.no_kv_offload;
+    cparams.flash_attn        = params.flash_attn;
+    cparams.no_perf           = params.no_perf;
+    cparams.op_offload        = !params.no_op_offload;
+
+    if (params.reranking) {
+        cparams.embeddings    = true;
+        cparams.pooling_type  = LLAMA_POOLING_TYPE_RANK;
+    }
+
+    cparams.type_k = params.cache_type_k;
+    cparams.type_v = params.cache_type_v;
+
+    return cparams;
+}
+
+struct ggml_threadpool_params ggml_threadpool_params_from_cpu_params(const cpu_params & params) {
+    struct ggml_threadpool_params tpp;
+
+    ggml_threadpool_params_init(&tpp, params.n_threads); // setup the defaults
+
+    if (params.mask_valid) {
+        std::memcpy(&tpp.cpumask, &params.cpumask, GGML_MAX_N_THREADS);
+    }
+
+    tpp.prio       = params.priority;
+    tpp.poll       = params.poll;
+    tpp.strict_cpu = params.strict_cpu;
+
+    return tpp;
+}
+
+//
+// Batch utils
+//
+
+void common_batch_clear(struct llama_batch & batch) {
+    batch.n_tokens = 0;
+}
+
+void common_batch_add(
+                 struct llama_batch & batch,
+                        llama_token   id,
+                          llama_pos   pos,
+    const std::vector<llama_seq_id> & seq_ids,
+                               bool   logits) {
+    GGML_ASSERT(batch.seq_id[batch.n_tokens] && "llama_batch size exceeded");
+
+    batch.token   [batch.n_tokens] = id;
+    batch.pos     [batch.n_tokens] = pos;
+    batch.n_seq_id[batch.n_tokens] = seq_ids.size();
+    for (size_t i = 0; i < seq_ids.size(); ++i) {
+        batch.seq_id[batch.n_tokens][i] = seq_ids[i];
+    }
+    batch.logits  [batch.n_tokens] = logits;
+
+    batch.n_tokens++;
+}
+
+//
+// Token utils
+//
+
+size_t common_lcp(const llama_tokens & a, const llama_tokens & b) {
+    size_t i;
+    for (i = 0; i < a.size() && i < b.size() && a[i] == b[i]; i++) {}
+
+    return i;
+}
+
+size_t common_lcs(const llama_tokens & a, const llama_tokens & b) {
+    // check for empty sequences
+    if (a.empty() || b.empty()) {
+        return 0;
+    }
+
+    // get the lengths of the input sequences
+    size_t a_len = a.size();
+    size_t b_len = b.size();
+
+    // initialize the maximum length of the longest common subsequence (LCS)
+    size_t max_length = 0;
+
+    // use two rows instead of a 2D matrix to optimize space
+    std::vector<size_t> prev_row(b_len + 1, 0);
+    std::vector<size_t> curr_row(b_len + 1, 0);
+
+    // iterate through the elements of a
+    for (size_t i = 1; i <= a_len; i++) {
+        // iterate through the elements of b
+        for (size_t j = 1; j <= b_len; j++) {
+            // if elements at the current positions match
+            if (a[i - 1] == b[j - 1]) {
+                // if it's the first element of either sequence, set LCS length to 1
+                if (i == 1 || j == 1) {
+                    curr_row[j] = 1;
+                } else {
+                    // increment LCS length by 1 compared to the previous element
+                    curr_row[j] = prev_row[j - 1] + 1;
+                }
+
+                // update max_length if necessary
+                if (curr_row[j] > max_length) {
+                    max_length = curr_row[j];
+                }
+            } else {
+                // reset LCS length if elements don't match
+                curr_row[j] = 0;
+            }
+        }
+
+        // update the previous row for the next iteration
+        prev_row = curr_row;
+    }
+
+    // return the maximum length of the LCS
+    return max_length;
+}
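
For intuition, a hypothetical comparison of the two helpers (assumes llama_tokens is the std::vector<llama_token> alias from common.h):

    #include <cstdio>

    #include "common.h"

    int main() {
        llama_tokens a = { 1, 2, 3, 4, 5 };
        llama_tokens b = { 1, 2, 9, 3, 4, 5 };
        printf("lcp = %zu\n", common_lcp(a, b));   // 2 -> shared prefix {1, 2}
        printf("lcs = %zu\n", common_lcs(a, b));   // 3 -> longest shared contiguous run {3, 4, 5}
    }
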
+
+//
+// Vocab utils
+//
+
+std::vector<llama_token> common_tokenize(
+  const struct llama_context * ctx,
+           const std::string & text,
+                        bool   add_special,
+                        bool   parse_special) {
+    const llama_model * model = llama_get_model(ctx);
+    const llama_vocab * vocab = llama_model_get_vocab(model);
+    return common_tokenize(vocab, text, add_special, parse_special);
+}
+
+std::vector<llama_token> common_tokenize(
+    const struct llama_vocab * vocab,
+           const std::string & text,
+                        bool   add_special,
+                        bool   parse_special) {
+    // upper limit for the number of tokens
+    int n_tokens = text.length() + 2 * add_special;
+    std::vector<llama_token> result(n_tokens);
+    n_tokens = llama_tokenize(vocab, text.data(), text.length(), result.data(), result.size(), add_special, parse_special);
+    if (n_tokens < 0) {
+        result.resize(-n_tokens);
+        int check = llama_tokenize(vocab, text.data(), text.length(), result.data(), result.size(), add_special, parse_special);
+        GGML_ASSERT(check == -n_tokens);
+    } else {
+        result.resize(n_tokens);
+    }
+    return result;
+}
+
+std::string common_token_to_piece(const struct llama_context * ctx, llama_token token, bool special) {
+    const llama_model * model = llama_get_model(ctx);
+    const llama_vocab * vocab = llama_model_get_vocab(model);
+    return common_token_to_piece(vocab, token, special);
+}
+
+std::string common_token_to_piece(const struct llama_vocab * vocab, llama_token token, bool special) {
+    std::string piece;
+    piece.resize(piece.capacity());  // using string internal cache, 15 bytes + '\n'
+    const int n_chars = llama_token_to_piece(vocab, token, &piece[0], piece.size(), 0, special);
+    if (n_chars < 0) {
+        piece.resize(-n_chars);
+        int check = llama_token_to_piece(vocab, token, &piece[0], piece.size(), 0, special);
+        GGML_ASSERT(check == -n_chars);
+    }
+    else {
+        piece.resize(n_chars);
+    }
+
+    return piece;
+}
+
+std::string common_detokenize(const struct llama_context * ctx, const std::vector<llama_token> & tokens, bool special) {
+    const llama_model * model = llama_get_model(ctx);
+    const llama_vocab * vocab = llama_model_get_vocab(model);
+    return common_detokenize(vocab, tokens, special);
+}
+
+std::string common_detokenize(const struct llama_vocab * vocab, const std::vector<llama_token> & tokens, bool special) {
+    std::string text;
+    text.resize(std::max(text.capacity(), tokens.size()));
+    int32_t n_chars = llama_detokenize(vocab, tokens.data(), (int32_t)tokens.size(), &text[0], (int32_t)text.size(), false, special);
+    if (n_chars < 0) {
+        text.resize(-n_chars);
+        n_chars = llama_detokenize(vocab, tokens.data(), (int32_t)tokens.size(), &text[0], (int32_t)text.size(), false, special);
+        GGML_ASSERT(n_chars <= (int32_t)text.size());  // whitespace trimming is performed after per-token detokenization
+    }
+
+    text.resize(n_chars);
+
+    // NOTE: the original tokenizer decodes bytes after collecting the pieces.
+    return text;
+}
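
A hypothetical tokenize/detokenize round trip using the wrappers above (assumes a llama_context * obtained elsewhere, e.g. from common_init_from_params):

    #include <string>
    #include <vector>

    #include "common.h"

    std::string roundtrip(llama_context * ctx, const std::string & text) {
        // tokenize with special tokens added, then render the pieces back to text
        std::vector<llama_token> toks = common_tokenize(ctx, text, /*add_special=*/true, /*parse_special=*/false);
        return common_detokenize(ctx, toks, /*special=*/true);
    }
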
+
+//
+// KV cache utils
+//
+
+void common_kv_cache_dump_view(const llama_kv_cache_view & view, int row_size) {
+    static const char slot_chars[] = ".123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz+";
+
+    printf("=== Dumping KV cache. total cells %d, max sequences per cell %d, populated cells %d, total tokens in cache %d, largest empty slot=%d @ %d",
+        view.n_cells, view.n_seq_max, view.used_cells, view.token_count, view.max_contiguous, view.max_contiguous_idx);
+
+    llama_kv_cache_view_cell * c_curr = view.cells;
+    llama_seq_id * cs_curr = view.cells_sequences;
+
+    for (int i = 0; i < view.n_cells; i++, c_curr++, cs_curr += view.n_seq_max) {
+        if (i % row_size == 0) {
+            printf("\n%5d: ", i);
+        }
+        int seq_count = 0;
+        for (int j = 0; j < view.n_seq_max; j++) {
+            if (cs_curr[j] >= 0) { seq_count++; }
+        }
+        putchar(slot_chars[std::min(sizeof(slot_chars) - 2, size_t(seq_count))]);
+    }
+
+    printf("\n=== Done dumping\n");
+}
+
+void common_kv_cache_dump_view_seqs(const llama_kv_cache_view & view, int row_size) {
+    static const char slot_chars[] = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";
+
+    printf("=== Dumping KV cache. total cells %d, max sequences per cell %d, populated cells %d, total tokens in cache %d, largest empty slot=%d @ %d\n",
+        view.n_cells, view.n_seq_max, view.used_cells, view.token_count, view.max_contiguous, view.max_contiguous_idx);
+
+    std::unordered_map<llama_seq_id, size_t> seqs;
+    llama_kv_cache_view_cell * c_curr = view.cells;
+    llama_seq_id * cs_curr = view.cells_sequences;
+
+    for (int i = 0; i < view.n_cells; i++, c_curr++, cs_curr += view.n_seq_max) {
+        for (int j = 0; j < view.n_seq_max; j++) {
+            if (cs_curr[j] < 0) { continue; }
+            if (seqs.find(cs_curr[j]) == seqs.end()) {
+                if (seqs.size() + 1 >= sizeof(slot_chars)) { break; }
+                const size_t sz = seqs.size();
+                seqs[cs_curr[j]] = sz;
+            }
+        }
+        if (seqs.size() + 1 >= sizeof(slot_chars)) { break; }
+    }
+
+    printf("=== Sequence legend: ");
+    for (const auto & it : seqs) {
+        printf("%zu=%d, ", it.second, it.first);
+    }
+    printf("'+'=other sequence ids");
+
+    c_curr = view.cells;
+    cs_curr = view.cells_sequences;
+    for (int i = 0; i < view.n_cells; i++, c_curr++, cs_curr += view.n_seq_max) {
+        if (i % row_size == 0) {
+            printf("\n%5d: ", i);
+        }
+        for (int j = 0; j < view.n_seq_max; j++) {
+            if (cs_curr[j] >= 0) {
+                const auto & it = seqs.find(cs_curr[j]);
+                putchar(it != seqs.end() ? int(slot_chars[it->second]) : '+');
+            } else {
+                putchar('.');
+            }
+        }
+        putchar(' ');
+    }
+
+    printf("\n=== Done dumping\n");
+}
+
+//
+// Embedding utils
+//
+
+void common_embd_normalize(const float * inp, float * out, int n, int embd_norm) {
+    double sum = 0.0;
+
+    switch (embd_norm) {
+        case -1: // no normalisation
+            sum = 1.0;
+            break;
+        case 0: // max absolute
+            for (int i = 0; i < n; i++) {
+                if (sum < std::abs(inp[i])) {
+                    sum = std::abs(inp[i]);
+                }
+            }
+            sum /= 32760.0; // make an int16 range
+            break;
+        case 2: // euclidean
+            for (int i = 0; i < n; i++) {
+                sum += inp[i] * inp[i];
+            }
+            sum = std::sqrt(sum);
+            break;
+        default: // p-norm (euclidean is p-norm p=2)
+            for (int i = 0; i < n; i++) {
+                sum += std::pow(std::abs(inp[i]), embd_norm);
+            }
+            sum = std::pow(sum, 1.0 / embd_norm);
+            break;
+    }
+
+    const float norm = sum > 0.0 ? 1.0 / sum : 0.0f;
+
+    for (int i = 0; i < n; i++) {
+        out[i] = inp[i] * norm;
+    }
+}
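+
+// Example (illustrative sketch): L2-normalize an embedding in place so that a later
+// dot product equals cosine similarity; embd and n_embd are assumed to come from
+// llama_get_embeddings_seq() and llama_model_n_embd() or similar.
+//
+//   common_embd_normalize(embd, embd, n_embd, /*embd_norm=*/2); // 2 = euclidean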
+
+float common_embd_similarity_cos(const float * embd1, const float * embd2, int n){
+    double sum  = 0.0;
+    double sum1 = 0.0;
+    double sum2 = 0.0;
+
+    for (int i = 0; i < n; i++) {
+        sum  += embd1[i] * embd2[i];
+        sum1 += embd1[i] * embd1[i];
+        sum2 += embd2[i] * embd2[i];
+    }
+
+    // Handle the case where one or both vectors are zero vectors
+    if (sum1 == 0.0 || sum2 == 0.0) {
+        if (sum1 == 0.0 && sum2 == 0.0) {
+            return 1.0f; // two zero vectors are similar
+        }
+        return 0.0f;
+    }
+
+    return sum / (sqrt(sum1) * sqrt(sum2));
+}
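+
+// Example (illustrative sketch): for embeddings already normalized with
+// common_embd_normalize(..., /*embd_norm=*/2) this reduces to a plain dot product;
+// for raw embeddings it divides by both Euclidean norms, and two all-zero vectors
+// are treated as fully similar (1.0f) by convention.
+//
+//   const float sim = common_embd_similarity_cos(embd_a, embd_b, n_embd); // in [-1, 1]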
+
+//
+// Control vector utils
+//
+
+static common_control_vector_data common_control_vector_load_one(const common_control_vector_load_info & load_info) {
+    common_control_vector_data result = { -1, {} };
+
+    ggml_context * ctx = nullptr;
+    struct gguf_init_params meta_gguf_params = {
+        /* .no_alloc = */ false,
+        /* .ctx      = */ &ctx,
+    };
+    struct gguf_context * ctx_gguf = gguf_init_from_file(load_info.fname.c_str(), meta_gguf_params);
+    if (!ctx_gguf) {
+        LOG_ERR("%s: failed to load control vector file from %s\n", __func__, load_info.fname.c_str());
+        return result;
+    }
+
+    int32_t n_tensors = gguf_get_n_tensors(ctx_gguf);
+    if (n_tensors == 0) {
+        LOG_WRN("%s: no direction tensors found in %s\n", __func__, load_info.fname.c_str());
+    }
+
+    for (int i = 0; i < n_tensors; i++) {
+        std::string name = gguf_get_tensor_name(ctx_gguf, i);
+
+        int layer_idx = -1;
+
+        // split on '.'
+        size_t dotpos = name.find('.');
+        if (dotpos != std::string::npos && name.substr(0, dotpos) == "direction") {
+            try {
+                layer_idx = std::stoi(name.substr(dotpos + 1));
+            } catch (...) {
+                layer_idx = -1;
+            }
+        }
+        if (layer_idx < 0) {
+            LOG_ERR("%s: invalid/unparsable direction tensor layer index in %s\n", __func__, load_info.fname.c_str());
+            result.n_embd = -1;
+            break;
+        } else if (layer_idx == 0) {
+            LOG_ERR("%s: invalid (zero) direction tensor layer index in %s\n", __func__, load_info.fname.c_str());
+            result.n_embd = -1;
+            break;
+        }
+
+        struct ggml_tensor * tensor = ggml_get_tensor(ctx, name.c_str());
+        if (tensor->type != GGML_TYPE_F32) {
+            LOG_ERR("%s: invalid (non-F32) direction tensor type in %s\n", __func__, load_info.fname.c_str());
+            result.n_embd = -1;
+            break;
+        }
+        if (ggml_n_dims(tensor) != 1) {
+            LOG_ERR("%s: invalid (non-1D) direction tensor shape in %s\n", __func__, load_info.fname.c_str());
+            result.n_embd = -1;
+            break;
+        }
+
+        if (result.n_embd == -1) {
+            result.n_embd = ggml_nelements(tensor);
+        } else if (ggml_nelements(tensor) != result.n_embd) {
+            LOG_ERR("%s: direction tensor in %s does not match previous dimensions\n", __func__, load_info.fname.c_str());
+            result.n_embd = -1;
+            break;
+        }
+
+        // extend if necessary - do not store data for layer 0 (it's not used)
+        result.data.resize(std::max(result.data.size(), static_cast<size_t>(result.n_embd * layer_idx)), 0.0f);
+
+        const float * src = (const float *) tensor->data;
+        float * dst = result.data.data() + result.n_embd * (layer_idx - 1);  // layer 1 at [0]
+        for (int j = 0; j < result.n_embd; j++) {
+            dst[j] += src[j] * load_info.strength;  // allows multiple directions for same layer in same file
+        }
+
+    }
+
+    if (result.n_embd == -1) {
+        LOG_WRN("%s: skipping %s due to invalid direction tensors\n", __func__, load_info.fname.c_str());
+        result.data.clear();
+    }
+
+    gguf_free(ctx_gguf);
+    ggml_free(ctx);
+
+    return result;
+}
+
+common_control_vector_data common_control_vector_load(const std::vector<common_control_vector_load_info> & load_infos) {
+    common_control_vector_data result = { -1, {} };
+
+    for (const auto & info : load_infos) {
+        auto cur = common_control_vector_load_one(info);
+
+        if (cur.n_embd == -1) {
+            result.n_embd = -1;
+            break;
+        }
+        if (result.n_embd != -1 && result.n_embd != cur.n_embd) {
+            LOG_ERR("%s: control vectors in %s does not match previous dimensions\n", __func__, info.fname.c_str());
+            result.n_embd = -1;
+            break;
+        }
+
+        if (result.n_embd == -1) {
+            result = std::move(cur);
+        } else {
+            result.data.resize(std::max(result.data.size(), cur.data.size()), 0.0f);  // extend if necessary
+            for (size_t i = 0; i < cur.data.size(); i++) {
+                result.data[i] += cur.data[i];
+            }
+        }
+    }
+
+    if (result.n_embd == -1) {
+        LOG_ERR("%s: no valid control vector files passed\n", __func__);
+        result.data.clear();
+    }
+
+    return result;
+}
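+
+// Example (illustrative sketch, file names are placeholders): load two control
+// vectors, the second with a negative strength to steer away from its direction.
+//
+//   std::vector<common_control_vector_load_info> infos = {
+//       { /*strength=*/  0.8f, /*fname=*/ "happy.gguf"   },
+//       { /*strength=*/ -0.4f, /*fname=*/ "verbose.gguf" },
+//   };
+//   common_control_vector_data cv = common_control_vector_load(infos);
+//   if (cv.n_embd == -1) { /* none of the files yielded valid direction tensors */ }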
+
+ggml_opt_dataset_t common_opt_dataset_init(struct llama_context * ctx, const std::vector<llama_token> & tokens, int64_t stride) {
+    const int64_t ne_datapoint = llama_n_ctx(ctx);
+    const int64_t ndata        = (tokens.size() - ne_datapoint - 1) / stride;
+    ggml_opt_dataset_t result = ggml_opt_dataset_init(
+        GGML_TYPE_I32, GGML_TYPE_I32, ne_datapoint, ne_datapoint, ndata, /*ndata_shard =*/ 1);
+
+    llama_token * data   = (llama_token *) ggml_opt_dataset_data(result)->data;
+    llama_token * labels = (llama_token *) ggml_opt_dataset_labels(result)->data;
+
+    for (int64_t idata = 0; idata < ndata; ++idata) {
+        memcpy(data   + idata*ne_datapoint, tokens.data() + idata*stride + 0, ne_datapoint*sizeof(llama_token));
+        memcpy(labels + idata*ne_datapoint, tokens.data() + idata*stride + 1, ne_datapoint*sizeof(llama_token));
+    }
+
+    return result;
+}
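+
+// Example (illustrative sketch): each datapoint is a context-sized window of the
+// token stream and its label is the same window shifted by one token (next-token
+// prediction). With stride == n_ctx the windows do not overlap:
+//
+//   datapoint k: tokens[k*stride     ... k*stride + n_ctx - 1]
+//   label     k: tokens[k*stride + 1 ... k*stride + n_ctx    ]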
diff --git a/llama/llama.cpp/common/common.go b/llama/llama.cpp/common/common.go
new file mode 100644
index 0000000..ebbb738
--- /dev/null
+++ b/llama/llama.cpp/common/common.go
@@ -0,0 +1,6 @@
+package common
+
+// #cgo CXXFLAGS: -std=c++11
+// #cgo CPPFLAGS: -I${SRCDIR}/../include
+// #cgo CPPFLAGS: -I${SRCDIR}/../../../ml/backend/ggml/ggml/include
+import "C"
diff --git a/llama/llama.cpp/common/common.h b/llama/llama.cpp/common/common.h
new file mode 100644
index 0000000..dea3426
--- /dev/null
+++ b/llama/llama.cpp/common/common.h
@@ -0,0 +1,674 @@
+// Various helper functions and utilities
+
+#pragma once
+
+#include "llama-cpp.h"
+
+#include <set>
+#include <sstream>
+#include <string>
+#include <vector>
+
+#ifdef _WIN32
+#define DIRECTORY_SEPARATOR '\\'
+#else
+#define DIRECTORY_SEPARATOR '/'
+#endif // _WIN32
+
+#define die(msg)          do { fputs("error: " msg "\n", stderr);                exit(1); } while (0)
+#define die_fmt(fmt, ...) do { fprintf(stderr, "error: " fmt "\n", __VA_ARGS__); exit(1); } while (0)
+
+#define print_build_info() do {                                                                     \
+    fprintf(stderr, "%s: build = %d (%s)\n",      __func__, LLAMA_BUILD_NUMBER, LLAMA_COMMIT);      \
+    fprintf(stderr, "%s: built with %s for %s\n", __func__, LLAMA_COMPILER, LLAMA_BUILD_TARGET);    \
+} while(0)
+
+#define DEFAULT_MODEL_PATH "models/7B/ggml-model-f16.gguf"
+
+struct common_adapter_lora_info {
+    std::string path;
+    float scale;
+
+    struct llama_adapter_lora * ptr;
+};
+
+using llama_tokens = std::vector<llama_token>;
+
+// build info
+extern int LLAMA_BUILD_NUMBER;
+extern const char * LLAMA_COMMIT;
+extern const char * LLAMA_COMPILER;
+extern const char * LLAMA_BUILD_TARGET;
+
+struct common_control_vector_load_info;
+
+//
+// CPU utils
+//
+
+struct cpu_params {
+    int      n_threads                   = -1;
+    bool     cpumask[GGML_MAX_N_THREADS] = {false}; // CPU affinity mask.
+    bool     mask_valid                  = false;   // Default: any CPU
+    enum ggml_sched_priority  priority   = GGML_SCHED_PRIO_NORMAL;  // Scheduling prio : (0 - normal, 1 - medium, 2 - high, 3 - realtime)
+    bool     strict_cpu                  = false;   // Use strict CPU placement
+    uint32_t poll                        = 50;      // Polling (busywait) level (0 - no polling, 100 - mostly polling)
+};
+
+int32_t cpu_get_num_physical_cores();
+int32_t cpu_get_num_math();
+
+//
+// Common params
+//
+
+enum llama_example {
+    LLAMA_EXAMPLE_COMMON,
+    LLAMA_EXAMPLE_SPECULATIVE,
+    LLAMA_EXAMPLE_MAIN,
+    LLAMA_EXAMPLE_EMBEDDING,
+    LLAMA_EXAMPLE_PERPLEXITY,
+    LLAMA_EXAMPLE_RETRIEVAL,
+    LLAMA_EXAMPLE_PASSKEY,
+    LLAMA_EXAMPLE_IMATRIX,
+    LLAMA_EXAMPLE_BENCH,
+    LLAMA_EXAMPLE_SERVER,
+    LLAMA_EXAMPLE_CVECTOR_GENERATOR,
+    LLAMA_EXAMPLE_EXPORT_LORA,
+    LLAMA_EXAMPLE_LLAVA,
+    LLAMA_EXAMPLE_LOOKUP,
+    LLAMA_EXAMPLE_PARALLEL,
+    LLAMA_EXAMPLE_TTS,
+
+    LLAMA_EXAMPLE_COUNT,
+};
+
+enum common_sampler_type {
+    COMMON_SAMPLER_TYPE_NONE        = 0,
+    COMMON_SAMPLER_TYPE_DRY         = 1,
+    COMMON_SAMPLER_TYPE_TOP_K       = 2,
+    COMMON_SAMPLER_TYPE_TOP_P       = 3,
+    COMMON_SAMPLER_TYPE_MIN_P       = 4,
+  //COMMON_SAMPLER_TYPE_TFS_Z       = 5,
+    COMMON_SAMPLER_TYPE_TYPICAL_P   = 6,
+    COMMON_SAMPLER_TYPE_TEMPERATURE = 7,
+    COMMON_SAMPLER_TYPE_XTC         = 8,
+    COMMON_SAMPLER_TYPE_INFILL      = 9,
+    COMMON_SAMPLER_TYPE_PENALTIES   = 10,
+    COMMON_SAMPLER_TYPE_TOP_N_SIGMA = 11,
+};
+
+// dimensionality reduction methods, used by cvector-generator
+enum dimre_method {
+    DIMRE_METHOD_PCA,
+    DIMRE_METHOD_MEAN,
+};
+
+enum common_conversation_mode {
+    COMMON_CONVERSATION_MODE_DISABLED = 0,
+    COMMON_CONVERSATION_MODE_ENABLED  = 1,
+    COMMON_CONVERSATION_MODE_AUTO     = 2,
+};
+
+enum common_grammar_trigger_type {
+    COMMON_GRAMMAR_TRIGGER_TYPE_TOKEN,
+    COMMON_GRAMMAR_TRIGGER_TYPE_WORD,
+    COMMON_GRAMMAR_TRIGGER_TYPE_PATTERN,
+    COMMON_GRAMMAR_TRIGGER_TYPE_PATTERN_START,
+};
+
+struct common_grammar_trigger {
+    common_grammar_trigger_type type;
+    std::string value;
+    llama_token token = LLAMA_TOKEN_NULL;
+};
+
+// sampling parameters
+struct common_params_sampling {
+    uint32_t seed = LLAMA_DEFAULT_SEED; // the seed used to initialize llama_sampler
+
+    int32_t n_prev             = 64;    // number of previous tokens to remember
+    int32_t n_probs            = 0;     // if greater than 0, output the probabilities of top n_probs tokens.
+    int32_t min_keep           = 0;     // 0 = disabled, otherwise samplers should return at least min_keep tokens
+    int32_t top_k              = 40;    // <= 0 to use vocab size
+    float   top_p              = 0.95f; // 1.0 = disabled
+    float   min_p              = 0.05f; // 0.0 = disabled
+    float   xtc_probability    = 0.00f; // 0.0 = disabled
+    float   xtc_threshold      = 0.10f; // > 0.5 disables XTC
+    float   typ_p              = 1.00f; // typical_p, 1.0 = disabled
+    float   temp               = 0.80f; // <= 0.0 to sample greedily, 0.0 to not output probabilities
+    float   dynatemp_range     = 0.00f; // 0.0 = disabled
+    float   dynatemp_exponent  = 1.00f; // controls how entropy maps to temperature in dynamic temperature sampler
+    int32_t penalty_last_n     = 64;    // last n tokens to penalize (0 = disable penalty, -1 = context size)
+    float   penalty_repeat     = 1.00f; // 1.0 = disabled
+    float   penalty_freq       = 0.00f; // 0.0 = disabled
+    float   penalty_present    = 0.00f; // 0.0 = disabled
+    float   dry_multiplier     = 0.0f;  // 0.0 = disabled;      DRY repetition penalty for tokens extending repetition:
+    float   dry_base           = 1.75f; // 0.0 = disabled;      multiplier * base ^ (length of sequence before token - allowed length)
+    int32_t dry_allowed_length = 2;     // tokens extending repetitions beyond this receive penalty
+    int32_t dry_penalty_last_n = -1;    // how many tokens to scan for repetitions (0 = disable penalty, -1 = context size)
+    int32_t mirostat           = 0;     // 0 = disabled, 1 = mirostat, 2 = mirostat 2.0
+    float   top_n_sigma        = -1.00f;// -1.0 = disabled
+    float   mirostat_tau       = 5.00f; // target entropy
+    float   mirostat_eta       = 0.10f; // learning rate
+    bool    ignore_eos         = false;
+    bool    no_perf            = false; // disable performance metrics
+    bool    timing_per_token   = false;
+
+    std::vector<std::string> dry_sequence_breakers = {"\n", ":", "\"", "*"};     // default sequence breakers for DRY
+
+
+    std::vector<enum common_sampler_type> samplers = {
+        COMMON_SAMPLER_TYPE_PENALTIES,
+        COMMON_SAMPLER_TYPE_DRY,
+        COMMON_SAMPLER_TYPE_TOP_N_SIGMA,
+        COMMON_SAMPLER_TYPE_TOP_K,
+        COMMON_SAMPLER_TYPE_TYPICAL_P,
+        COMMON_SAMPLER_TYPE_TOP_P,
+        COMMON_SAMPLER_TYPE_MIN_P,
+        COMMON_SAMPLER_TYPE_XTC,
+        COMMON_SAMPLER_TYPE_TEMPERATURE,
+    };
+
+    std::string                         grammar; // optional BNF-like grammar to constrain sampling
+    bool                                grammar_lazy = false;
+    std::vector<common_grammar_trigger> grammar_triggers; // optional triggers (for lazy grammars)
+    std::set<llama_token>               preserved_tokens;
+
+    std::vector<llama_logit_bias> logit_bias; // logit biases to apply
+
+    // print the parameters into a string
+    std::string print() const;
+};
+
+struct common_params_model {
+    std::string path    = ""; // model local path                                           // NOLINT
+    std::string url     = ""; // model url to download                                      // NOLINT
+    std::string hf_repo = ""; // HF repo                                                    // NOLINT
+    std::string hf_file = ""; // HF file                                                    // NOLINT
+};
+
+struct common_params_speculative {
+    std::vector<ggml_backend_dev_t> devices; // devices to use for offloading
+
+    int32_t n_ctx        =     0; // draft context size
+    int32_t n_max        =    16; // maximum number of tokens to draft during speculative decoding
+    int32_t n_min        =     0; // minimum number of draft tokens to use for speculative decoding
+    int32_t n_gpu_layers =    -1; // number of layers to store in VRAM for the draft model (-1 - use default)
+    float   p_split      =  0.1f; // speculative decoding split probability
+    float   p_min        = 0.75f; // minimum speculative decoding probability (greedy)
+
+    struct cpu_params cpuparams;
+    struct cpu_params cpuparams_batch;
+
+    struct common_params_model model;
+};
+
+struct common_params_vocoder {
+    struct common_params_model model;
+
+    std::string speaker_file = ""; // speaker file path                                      // NOLINT
+
+    bool use_guide_tokens = false; // enable guide tokens to improve TTS accuracy            // NOLINT
+};
+
+enum common_reasoning_format {
+    COMMON_REASONING_FORMAT_NONE,
+    COMMON_REASONING_FORMAT_DEEPSEEK, // Extract thinking tag contents and return as `message.reasoning_content`
+};
+
+struct common_params {
+    int32_t n_predict             =    -1; // new tokens to predict
+    int32_t n_ctx                 =  4096; // context size
+    int32_t n_batch               =  2048; // logical batch size for prompt processing (must be >=32 to use BLAS)
+    int32_t n_ubatch              =   512; // physical batch size for prompt processing (must be >=32 to use BLAS)
+    int32_t n_keep                =     0; // number of tokens to keep from initial prompt
+    int32_t n_chunks              =    -1; // max number of chunks to process (-1 = unlimited)
+    int32_t n_parallel            =     1; // number of parallel sequences to decode
+    int32_t n_sequences           =     1; // number of sequences to decode
+    int32_t grp_attn_n            =     1; // group-attention factor
+    int32_t grp_attn_w            =   512; // group-attention width
+    int32_t n_print               =    -1; // print token count every n tokens (-1 = disabled)
+    float   rope_freq_base        =  0.0f; // RoPE base frequency
+    float   rope_freq_scale       =  0.0f; // RoPE frequency scaling factor
+    float   yarn_ext_factor       = -1.0f; // YaRN extrapolation mix factor
+    float   yarn_attn_factor      =  1.0f; // YaRN magnitude scaling factor
+    float   yarn_beta_fast        = 32.0f; // YaRN low correction dim
+    float   yarn_beta_slow        =  1.0f; // YaRN high correction dim
+    int32_t yarn_orig_ctx         =     0; // YaRN original context length
+    float   defrag_thold          =  0.1f; // KV cache defragmentation threshold
+
+    // offload params
+    std::vector<ggml_backend_dev_t> devices; // devices to use for offloading
+
+    int32_t n_gpu_layers      = -1;  // number of layers to store in VRAM (-1 - use default)
+    int32_t main_gpu          = 0;   // the GPU that is used for scratch and small tensors
+    float   tensor_split[128] = {0}; // how split tensors should be distributed across GPUs
+
+    enum llama_split_mode split_mode = LLAMA_SPLIT_MODE_LAYER; // how to split the model across GPUs
+
+    struct cpu_params cpuparams;
+    struct cpu_params cpuparams_batch;
+
+    ggml_backend_sched_eval_callback cb_eval = nullptr;
+    void * cb_eval_user_data                 = nullptr;
+
+    ggml_numa_strategy numa = GGML_NUMA_STRATEGY_DISABLED;
+
+    enum llama_rope_scaling_type rope_scaling_type = LLAMA_ROPE_SCALING_TYPE_UNSPECIFIED;
+    enum llama_pooling_type      pooling_type      = LLAMA_POOLING_TYPE_UNSPECIFIED; // pooling type for embeddings
+    enum llama_attention_type    attention_type    = LLAMA_ATTENTION_TYPE_UNSPECIFIED; // attention type for embeddings
+
+    struct common_params_sampling    sampling;
+    struct common_params_speculative speculative;
+    struct common_params_vocoder     vocoder;
+
+    struct common_params_model model;
+
+    std::string model_alias          = ""; // model alias                                                   // NOLINT
+    std::string hf_token             = ""; // HF token                                                      // NOLINT
+    std::string prompt               = "";                                                                  // NOLINT
+    std::string system_prompt        = "";                                                                  // NOLINT
+    std::string prompt_file          = ""; // store the external prompt file name                           // NOLINT
+    std::string path_prompt_cache    = ""; // path to file for saving/loading prompt eval state             // NOLINT
+    std::string input_prefix         = ""; // string to prefix user inputs with                             // NOLINT
+    std::string input_suffix         = ""; // string to suffix user inputs with                             // NOLINT
+    std::string lookup_cache_static  = ""; // path of static ngram cache file for lookup decoding           // NOLINT
+    std::string lookup_cache_dynamic = ""; // path of dynamic ngram cache file for lookup decoding          // NOLINT
+    std::string logits_file          = ""; // file for saving *all* logits                                  // NOLINT
+
+    std::vector<std::string> in_files;   // all input files
+    std::vector<std::string> antiprompt; // strings upon which more user input is prompted (a.k.a. reverse prompts)
+    std::vector<llama_model_kv_override> kv_overrides;
+    std::vector<llama_model_tensor_buft_override> tensor_buft_overrides;
+
+    bool lora_init_without_apply = false; // only load lora to memory, but do not apply it to ctx (user can manually apply lora later using llama_adapter_lora_apply)
+    std::vector<common_adapter_lora_info> lora_adapters; // lora adapter path with user defined scale
+
+    std::vector<common_control_vector_load_info> control_vectors; // control vector with user defined scale
+
+    int32_t verbosity                  = 0;
+    int32_t control_vector_layer_start = -1; // layer range for control vector
+    int32_t control_vector_layer_end   = -1; // layer range for control vector
+
+    int32_t ppl_stride      = 0;     // stride for perplexity calculations. If left at 0, the pre-existing approach will be used.
+    int32_t ppl_output_type = 0;     // = 0 -> ppl output is as usual, = 1 -> ppl output is num_tokens, ppl, one per line
+                                     //                                       (which is more convenient to use for plotting)
+                                     //
+    bool   hellaswag        = false; // compute HellaSwag score over random tasks from datafile supplied in prompt
+    size_t hellaswag_tasks  = 400;   // number of tasks to use when computing the HellaSwag score
+
+    bool   winogrande       = false; // compute Winogrande score over random tasks from datafile supplied in prompt
+    size_t winogrande_tasks = 0;     // number of tasks to use when computing the Winogrande score. If 0, all tasks will be computed
+
+    bool   multiple_choice  = false;  // compute TruthfulQA score over random tasks from datafile supplied in prompt
+    size_t multiple_choice_tasks = 0; // number of tasks to use when computing the TruthfulQA score. If 0, all tasks will be computed
+
+    bool   kl_divergence    = false; // compute KL divergence
+
+    bool usage             = false; // print usage
+    bool completion        = false; // print source-able completion script
+    bool use_color         = false; // use color to distinguish generations and inputs
+    bool special           = false; // enable special token output
+    bool interactive       = false; // interactive mode
+    bool interactive_first = false; // wait for user input immediately
+    bool prompt_cache_all  = false; // save user input and generations to prompt cache
+    bool prompt_cache_ro   = false; // open the prompt cache read-only and do not update it
+
+    bool escape            = true;  // escape "\n", "\r", "\t", "\'", "\"", and "\\"
+    bool multiline_input   = false; // reverse the usage of `\`
+    bool simple_io         = false; // improves compatibility with subprocesses and limited consoles
+    bool cont_batching     = true;  // insert new sequences for decoding on-the-fly
+    bool flash_attn        = false; // flash attention
+    bool no_perf           = false; // disable performance metrics
+    bool ctx_shift         = true;  // context shift on infinite text generation
+
+    bool input_prefix_bos  = false; // prefix BOS to user inputs, preceding input_prefix
+    bool use_mmap          = true;  // use mmap for faster loads
+    bool use_mlock         = false; // use mlock to keep model in memory
+    bool verbose_prompt    = false; // print prompt tokens before generation
+    bool display_prompt    = true;  // print prompt before generation
+    bool dump_kv_cache     = false; // dump the KV cache contents for debugging purposes
+    bool no_kv_offload     = false; // disable KV offloading
+    bool warmup            = true;  // warmup run
+    bool check_tensors     = false; // validate tensor data
+    bool no_op_offload     = false; // globally disable offload host tensor operations to device
+
+    bool single_turn       = false; // single turn chat conversation
+
+    ggml_type cache_type_k = GGML_TYPE_F16; // KV cache data type for the K
+    ggml_type cache_type_v = GGML_TYPE_F16; // KV cache data type for the V
+
+    common_conversation_mode conversation_mode = COMMON_CONVERSATION_MODE_AUTO;
+
+    // multimodal models (see tools/mtmd)
+    struct common_params_model mmproj;
+    bool mmproj_use_gpu = true;     // use GPU for multimodal model
+    bool no_mmproj = false;         // explicitly disable multimodal model
+    std::vector<std::string> image; // path to image file(s)
+
+    // embedding
+    bool embedding         = false; // get only sentence embedding
+    int32_t embd_normalize = 2;     // normalisation for embeddings (-1=none, 0=max absolute int16, 1=taxicab, 2=euclidean, >2=p-norm)
+    std::string embd_out   = "";    // empty = default, "array" = [[],[]...], "json" = openai style, "json+" = same "json" + cosine similarity matrix
+    std::string embd_sep   = "\n";  // separator of embeddings
+    bool reranking         = false; // enable reranking support on server
+
+    // server params
+    int32_t port           = 8080;         // server listens on this network port
+    int32_t timeout_read   = 600;          // http read timeout in seconds
+    int32_t timeout_write  = timeout_read; // http write timeout in seconds
+    int32_t n_threads_http = -1;           // number of threads to process HTTP requests (TODO: support threadpool)
+    int32_t n_cache_reuse  = 0;            // min chunk size to reuse from the cache via KV shifting
+
+    std::string hostname      = "127.0.0.1";
+    std::string public_path   = "";                                                                         // NOLINT
+    std::string chat_template = "";                                                                         // NOLINT
+    bool use_jinja = false;                                                                                 // NOLINT
+    bool enable_chat_template = true;
+    common_reasoning_format reasoning_format = COMMON_REASONING_FORMAT_DEEPSEEK;
+
+    std::vector<std::string> api_keys;
+
+    std::string ssl_file_key  = "";                                                                         // NOLINT
+    std::string ssl_file_cert = "";                                                                         // NOLINT
+
+    // "advanced" endpoints are disabled by default for better security
+    bool webui            = true;
+    bool endpoint_slots   = false;
+    bool endpoint_props   = false; // only control POST requests, not GET
+    bool endpoint_metrics = false;
+
+    bool log_json = false;
+
+    std::string slot_save_path;
+
+    float slot_prompt_similarity = 0.5f;
+
+    // batched-bench params
+    bool is_pp_shared = false;
+
+    std::vector<int32_t> n_pp;
+    std::vector<int32_t> n_tg;
+    std::vector<int32_t> n_pl;
+
+    // retrieval params
+    std::vector<std::string> context_files; // context files to embed
+
+    int32_t chunk_size = 64; // chunk size for context embedding
+
+    std::string chunk_separator = "\n"; // chunk separator for context embedding
+
+    // passkey params
+    int32_t n_junk = 250; // number of times to repeat the junk text
+    int32_t i_pos  = -1;  // position of the passkey in the junk text
+
+    // imatrix params
+    int32_t n_out_freq  = 10; // output the imatrix every n_out_freq iterations
+    int32_t n_save_freq =  0; // save the imatrix every n_save_freq iterations
+    int32_t i_chunk     =  0; // start processing from this chunk
+
+    bool process_output = false; // collect data for the output tensor
+    bool compute_ppl    = true;  // whether to compute perplexity
+    bool parse_special  = false; // whether to parse special tokens during imatrix tokenization
+
+    // cvector-generator params
+    int n_pca_batch = 100;
+    int n_pca_iterations = 1000;
+    dimre_method cvector_dimre_method = DIMRE_METHOD_PCA;
+    std::string cvector_positive_file = "tools/cvector-generator/positive.txt";
+    std::string cvector_negative_file = "tools/cvector-generator/negative.txt";
+
+    bool spm_infill = false; // suffix/prefix/middle pattern for infill
+
+    // batched-bench params
+    bool batched_bench_output_jsonl = false;
+
+    // common params
+    std::string out_file; // output filename for all example programs
+};
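+
+// Example (illustrative sketch, values are arbitrary): typical programmatic setup
+// before handing the params to common_init_from_params(), declared below.
+//
+//   common_params params;
+//   params.model.path    = "models/7B/ggml-model-f16.gguf";
+//   params.n_ctx         = 8192;
+//   params.sampling.temp = 0.7f;
+//   common_init_result llama_init = common_init_from_params(params);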
+
+// call once at the start of a program if it uses libcommon
+// initializes the logging system and prints info about the build
+void common_init();
+
+std::string common_params_get_system_info(const common_params & params);
+
+bool parse_cpu_range(const std::string & range, bool(&boolmask)[GGML_MAX_N_THREADS]);
+bool parse_cpu_mask(const std::string & mask, bool(&boolmask)[GGML_MAX_N_THREADS]);
+void postprocess_cpu_params(cpu_params & cpuparams, const cpu_params * role_model = nullptr);
+bool set_process_priority(enum ggml_sched_priority prio);
+
+//
+// String utils
+//
+
+#ifdef __GNUC__
+#    if defined(__MINGW32__) && !defined(__clang__)
+#        define LLAMA_COMMON_ATTRIBUTE_FORMAT(...) __attribute__((format(gnu_printf, __VA_ARGS__)))
+#    else
+#        define LLAMA_COMMON_ATTRIBUTE_FORMAT(...) __attribute__((format(printf, __VA_ARGS__)))
+#    endif
+#else
+#    define LLAMA_COMMON_ATTRIBUTE_FORMAT(...)
+#endif
+
+LLAMA_COMMON_ATTRIBUTE_FORMAT(1, 2)
+std::string string_format(const char * fmt, ...);
+
+std::string string_strip(const std::string & str);
+std::string string_get_sortable_timestamp();
+
+std::string string_join(const std::vector<std::string> & values, const std::string & separator);
+std::vector<std::string> string_split(const std::string & str, const std::string & delimiter);
+std::string string_repeat(const std::string & str, size_t n);
+
+void string_replace_all(std::string & s, const std::string & search, const std::string & replace);
+
+std::string regex_escape(const std::string & s);
+
+template<class T>
+static std::vector<T> string_split(const std::string & str, char delim) {
+    static_assert(!std::is_same<T, std::string>::value, "Please use the specialized version for std::string");
+    std::vector<T> values;
+    std::istringstream str_stream(str);
+    std::string token;
+    while (std::getline(str_stream, token, delim)) {
+        T value;
+        std::istringstream token_stream(token);
+        token_stream >> value;
+        values.push_back(value);
+    }
+    return values;
+}
+
+template<>
+std::vector<std::string> string_split<std::string>(const std::string & input, char separator)
+{
+    std::vector<std::string> parts;
+    size_t begin_pos = 0;
+    size_t separator_pos = input.find(separator);
+    while (separator_pos != std::string::npos) {
+        std::string part = input.substr(begin_pos, separator_pos - begin_pos);
+        parts.emplace_back(part);
+        begin_pos = separator_pos + 1;
+        separator_pos = input.find(separator, begin_pos);
+    }
+    parts.emplace_back(input.substr(begin_pos, separator_pos - begin_pos));
+    return parts;
+}
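+
+// Example (illustrative sketch): the generic overload parses each delimited field
+// with operator>>, while the std::string specialization above preserves empty fields.
+//
+//   std::vector<int>         ns = string_split<int>("1,2,3", ',');        // {1, 2, 3}
+//   std::vector<std::string> ss = string_split<std::string>("a,,b", ','); // {"a", "", "b"}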
+
+static bool string_starts_with(const std::string & str,
+                               const std::string & prefix) {  // While we wait for C++20's std::string::starts_with...
+    return str.rfind(prefix, 0) == 0;
+}
+
+static bool string_ends_with(const std::string & str,
+                               const std::string & suffix) {  // While we wait for C++20's std::string::ends_with...
+    return str.size() >= suffix.size() && str.compare(str.size()-suffix.size(), suffix.size(), suffix) == 0;
+}
+
+bool string_parse_kv_override(const char * data, std::vector<llama_model_kv_override> & overrides);
+void string_process_escapes(std::string & input);
+
+std::string string_from(bool value);
+std::string string_from(const std::vector<int> & values);
+std::string string_from(const struct llama_context * ctx, const std::vector<llama_token> & tokens);
+std::string string_from(const struct llama_context * ctx, const struct llama_batch & batch);
+
+//
+// Filesystem utils
+//
+
+bool fs_validate_filename(const std::string & filename);
+bool fs_create_directory_with_parents(const std::string & path);
+
+std::string fs_get_cache_directory();
+std::string fs_get_cache_file(const std::string & filename);
+
+//
+// Model utils
+//
+
+// note: defines object's lifetime
+struct common_init_result {
+    llama_model_ptr   model;
+    llama_context_ptr context;
+
+    std::vector<llama_adapter_lora_ptr> lora;
+};
+
+struct common_init_result     common_init_from_params(common_params & params);
+
+struct llama_model_params     common_model_params_to_llama  (      common_params & params);
+struct llama_context_params   common_context_params_to_llama(const common_params & params);
+struct ggml_threadpool_params ggml_threadpool_params_from_cpu_params(const cpu_params & params);
+
+// clear LoRA adapters from context, then apply new list of adapters
+void common_set_adapter_lora(struct llama_context * ctx, std::vector<common_adapter_lora_info> & lora);
+
+std::string                   get_model_endpoint();
+
+//
+// Batch utils
+//
+
+void common_batch_clear(struct llama_batch & batch);
+
+void common_batch_add(
+                 struct llama_batch & batch,
+                        llama_token   id,
+                          llama_pos   pos,
+    const std::vector<llama_seq_id> & seq_ids,
+                               bool   logits);
+
+//
+// Token utils
+//
+
+// longest common prefix
+size_t common_lcp(const llama_tokens & a, const llama_tokens & b);
+
+// longest common subsequence
+size_t common_lcs(const llama_tokens & a, const llama_tokens & b);
+
+//
+// Vocab utils
+//
+
+// tokenizes a string into a vector of tokens
+// should work similar to Python's `tokenizer.encode`
+std::vector<llama_token> common_tokenize(
+  const struct llama_context * ctx,
+           const std::string & text,
+                        bool   add_special,
+                        bool   parse_special = false);
+
+std::vector<llama_token> common_tokenize(
+    const struct llama_vocab * vocab,
+           const std::string & text,
+                        bool   add_special,
+                        bool   parse_special = false);
+
+// tokenizes a token into a piece, optionally renders special/control tokens
+// should work similar to Python's `tokenizer.id_to_piece`
+std::string common_token_to_piece(
+        const struct llama_context * ctx,
+                       llama_token   token,
+                       bool          special = true);
+
+std::string common_token_to_piece(
+          const struct llama_vocab * vocab,
+                       llama_token   token,
+                       bool          special = true);
+
+// detokenizes a vector of tokens into a string
+// should work similar to Python's `tokenizer.decode`
+// optionally renders special/control tokens
+std::string common_detokenize(
+            const struct llama_context * ctx,
+        const std::vector<llama_token> & tokens,
+                                  bool   special = true);
+
+std::string common_detokenize(
+              const struct llama_vocab * vocab,
+        const std::vector<llama_token> & tokens,
+                                  bool   special = true);
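+
+// Example (illustrative sketch): a tokenize/detokenize round trip; special tokens
+// are parsed on the way in and rendered back on the way out.
+//
+//   const llama_tokens toks = common_tokenize(ctx, "Hello world", /*add_special=*/true, /*parse_special=*/true);
+//   const std::string  text = common_detokenize(ctx, toks, /*special=*/true);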
+
+//
+// KV cache utils
+//
+
+// Dump the KV cache view with the number of sequences per cell.
+void common_kv_cache_dump_view(const llama_kv_cache_view & view, int row_size = 80);
+
+// Dump the KV cache view showing individual sequences in each cell (long output).
+void common_kv_cache_dump_view_seqs(const llama_kv_cache_view & view, int row_size = 40);
+
+//
+// Embedding utils
+//
+
+// TODO: replace embd_norm with an enum
+void common_embd_normalize(const float * inp, float * out, int n, int embd_norm);
+
+float common_embd_similarity_cos(const float * embd1, const float * embd2, int n);
+
+//
+// Control vector utils
+//
+
+struct common_control_vector_data {
+    int n_embd;
+
+    // stores data for layers [1, n_layer] where n_layer = data.size() / n_embd
+    std::vector<float> data;
+};
+
+struct common_control_vector_load_info {
+    float strength;
+
+    std::string fname;
+};
+
+// Load control vectors, scale each by strength, and add them together.
+// On error, returns {-1, empty}
+common_control_vector_data common_control_vector_load(const std::vector<common_control_vector_load_info> & load_infos);
+
+//
+// Split utils
+//
+
+namespace {
+
+const char * const LLM_KV_SPLIT_NO            = "split.no";
+const char * const LLM_KV_SPLIT_COUNT         = "split.count";
+const char * const LLM_KV_SPLIT_TENSORS_COUNT = "split.tensors.count";
+
+}
+
+//
+// training utils
+//
+
+ggml_opt_dataset_t common_opt_dataset_init(struct llama_context * ctx, const std::vector<llama_token> & tokens, int64_t stride);
diff --git a/llama/llama.cpp/common/json-schema-to-grammar.cpp b/llama/llama.cpp/common/json-schema-to-grammar.cpp
new file mode 100644
index 0000000..656b3ec
--- /dev/null
+++ b/llama/llama.cpp/common/json-schema-to-grammar.cpp
@@ -0,0 +1,1027 @@
+#include "json-schema-to-grammar.h"
+#include "common.h"
+
+#include <nlohmann/json.hpp>
+#include <algorithm>
+#include <map>
+#include <regex>
+#include <sstream>
+#include <string>
+#include <unordered_map>
+#include <unordered_set>
+#include <vector>
+
+using json = nlohmann::ordered_json;
+
+static std::string build_repetition(const std::string & item_rule, int min_items, int max_items, const std::string & separator_rule = "") {
+    auto has_max = max_items != std::numeric_limits<int>::max();
+
+    if (max_items == 0) {
+        return "";
+    }
+    if (min_items == 0 && max_items == 1) {
+        return item_rule + "?";
+    }
+
+    if (separator_rule.empty()) {
+        if (min_items == 1 && !has_max) {
+            return item_rule + "+";
+        } else if (min_items == 0 && !has_max) {
+            return item_rule + "*";
+        } else {
+            return item_rule + "{" + std::to_string(min_items) + "," + (has_max ? std::to_string(max_items) : "") + "}";
+        }
+    }
+
+    auto result = item_rule + " " + build_repetition("(" + separator_rule + " " + item_rule + ")", min_items == 0 ? 0 : min_items - 1, has_max ? max_items - 1 : max_items);
+    if (min_items == 0) {
+        result = "(" + result + ")?";
+    }
+    return result;
+}
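+
+// Example (illustrative sketch) of the GBNF fragments produced above, where "item"
+// and "comma" stand for previously defined rule names:
+//   build_repetition("item", 1, INT_MAX)      -> "item+"
+//   build_repetition("item", 0, INT_MAX)      -> "item*"
+//   build_repetition("item", 2, 4)            -> "item{2,4}"
+//   build_repetition("item", 1, 3, "comma")   -> "item (comma item){0,2}"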
+
+/* Minimalistic replacement for std::string_view, which is only available from C++17 onwards */
+class string_view {
+    const std::string & _str;
+    const size_t _start;
+    const size_t _end;
+public:
+    string_view(const std::string & str, size_t start = 0, size_t end  = std::string::npos) : _str(str), _start(start), _end(end == std::string::npos ? str.length() : end) {}
+
+    size_t size() const {
+        return _end - _start;
+    }
+
+    size_t length() const {
+        return size();
+    }
+
+    operator std::string() const {
+        return str();
+    }
+
+    std::string str() const {
+        return _str.substr(_start, _end - _start);
+    }
+
+    string_view substr(size_t pos, size_t len = std::string::npos) const {
+        return string_view(_str, _start + pos, len == std::string::npos ? _end : _start + pos + len);
+    }
+
+    char operator[](size_t pos) const {
+        auto index = _start + pos;
+        if (index >= _end) {
+            throw std::out_of_range("string_view index out of range");
+        }
+        return _str[_start + pos];
+    }
+
+    bool operator==(const string_view & other) const {
+        std::string this_str = *this;
+        std::string other_str = other;
+        return this_str == other_str;
+    }
+};
+
+static void _build_min_max_int(int min_value, int max_value, std::stringstream & out, int decimals_left = 16, bool top_level = true) {
+    auto has_min = min_value != std::numeric_limits<int>::min();
+    auto has_max = max_value != std::numeric_limits<int>::max();
+
+    auto digit_range = [&](char from, char to) {
+        out << "[";
+        if (from == to) {
+            out << from;
+        } else {
+            out << from << "-" << to;
+        }
+        out << "]";
+    };
+    auto more_digits = [&](int min_digits, int max_digits) {
+        out << "[0-9]";
+        if (min_digits == max_digits && min_digits == 1) {
+            return;
+        }
+        out << "{";
+        out << min_digits;
+        if (max_digits != min_digits) {
+            out << ",";
+            if (max_digits != std::numeric_limits<int>::max()) {
+                out << max_digits;
+            }
+        }
+        out << "}";
+    };
+    std::function<void(const string_view &, const string_view &)> uniform_range =
+        [&](const string_view & from, const string_view & to) {
+            size_t i = 0;
+            while (i < from.length() && i < to.length() && from[i] == to[i]) {
+                i++;
+            }
+            if (i > 0) {
+                out << "\"" << from.substr(0, i).str() << "\"";
+            }
+            if (i < from.length() && i < to.length()) {
+                if (i > 0) {
+                    out << " ";
+                }
+                auto sub_len = from.length() - i - 1;
+                if (sub_len > 0) {
+                    auto from_sub = from.substr(i + 1);
+                    auto to_sub = to.substr(i + 1);
+                    auto sub_zeros = string_repeat("0", sub_len);
+                    auto sub_nines = string_repeat("9", sub_len);
+
+                    auto to_reached = false;
+                    out << "(";
+                    if (from_sub == sub_zeros) {
+                        digit_range(from[i], to[i] - 1);
+                        out << " ";
+                        more_digits(sub_len, sub_len);
+                    } else {
+                        out << "[" << from[i] << "] ";
+                        out << "(";
+                        uniform_range(from_sub, sub_nines);
+                        out << ")";
+                        if (from[i] < to[i] - 1) {
+                            out << " | ";
+                            if (to_sub == sub_nines) {
+                                digit_range(from[i] + 1, to[i]);
+                                to_reached = true;
+                            } else {
+                                digit_range(from[i] + 1, to[i] - 1);
+                            }
+                            out << " ";
+                            more_digits(sub_len, sub_len);
+                        }
+                    }
+                    if (!to_reached) {
+                        out << " | ";
+                        digit_range(to[i], to[i]);
+                        out << " ";
+                        uniform_range(sub_zeros, to_sub);
+                    }
+                    out << ")";
+                } else {
+                    out << "[" << from[i] << "-" << to[i] << "]";
+                }
+            }
+        };
+
+    if (has_min && has_max) {
+        if (min_value < 0 && max_value < 0) {
+            out << "\"-\" (";
+            _build_min_max_int(-max_value, -min_value, out, decimals_left, /* top_level= */ true);
+            out << ")";
+            return;
+        }
+
+        if (min_value < 0) {
+            out << "\"-\" (";
+            _build_min_max_int(0, -min_value, out, decimals_left, /* top_level= */ true);
+            out << ") | ";
+            min_value = 0;
+        }
+
+        auto min_s = std::to_string(min_value);
+        auto max_s = std::to_string(max_value);
+        auto min_digits = min_s.length();
+        auto max_digits = max_s.length();
+
+        for (auto digits = min_digits; digits < max_digits; digits++) {
+            uniform_range(min_s, string_repeat("9", digits));
+            min_s = "1" + string_repeat("0", digits);
+            out << " | ";
+        }
+        uniform_range(min_s, max_s);
+        return;
+    }
+
+    auto less_decimals = std::max(decimals_left - 1, 1);
+
+    if (has_min) {
+        if (min_value < 0) {
+            out << "\"-\" (";
+            _build_min_max_int(std::numeric_limits<int>::min(), -min_value, out, decimals_left, /* top_level= */ false);
+            out << ") | [0] | [1-9] ";
+            more_digits(0, decimals_left - 1);
+        } else if (min_value == 0) {
+            if (top_level) {
+                out << "[0] | [1-9] ";
+                more_digits(0, less_decimals);
+            } else {
+                more_digits(1, decimals_left);
+            }
+        } else if (min_value <= 9) {
+            char c = '0' + min_value;
+            auto range_start = top_level ? '1' : '0';
+            if (c > range_start) {
+                digit_range(range_start, c - 1);
+                out << " ";
+                more_digits(1, less_decimals);
+                out << " | ";
+            }
+            digit_range(c, '9');
+            out << " ";
+            more_digits(0, less_decimals);
+        } else {
+            auto min_s = std::to_string(min_value);
+            auto len = min_s.length();
+            auto c = min_s[0];
+
+            if (c > '1') {
+                digit_range(top_level ? '1' : '0', c - 1);
+                out << " ";
+                more_digits(len, less_decimals);
+                out << " | ";
+            }
+            digit_range(c, c);
+            out << " (";
+            _build_min_max_int(std::stoi(min_s.substr(1)), std::numeric_limits<int>::max(), out, less_decimals, /* top_level= */ false);
+            out << ")";
+            if (c < '9') {
+                out << " | ";
+                digit_range(c + 1, '9');
+                out << " ";
+                more_digits(len - 1, less_decimals);
+            }
+        }
+        return;
+    }
+
+    if (has_max) {
+        if (max_value >= 0) {
+            if (top_level) {
+                out << "\"-\" [1-9] ";
+                more_digits(0, less_decimals);
+                out << " | ";
+            }
+            _build_min_max_int(0, max_value, out, decimals_left, /* top_level= */ true);
+        } else {
+            out << "\"-\" (";
+            _build_min_max_int(-max_value, std::numeric_limits<int>::max(), out, decimals_left, /* top_level= */ false);
+            out << ")";
+        }
+        return;
+    }
+
+    throw std::runtime_error("At least one of min_value or max_value must be set");
+}
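+
+// Example (illustrative sketch): for an integer schema with "minimum": 1 and
+// "maximum": 12 the helper above emits the GBNF alternation
+//   [1-9] | "1" [0-2]
+// i.e. a single digit 1-9, or a leading "1" followed by 0-2. One-sided bounds fall
+// back to the digit-count patterns produced by more_digits().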
+
+const std::string SPACE_RULE = "| \" \" | \"\\n\"{1,2} [ \\t]{0,20}";
+
+struct BuiltinRule {
+    std::string content;
+    std::vector<std::string> deps;
+};
+
+std::unordered_map<std::string, BuiltinRule> PRIMITIVE_RULES = {
+    {"boolean", {"(\"true\" | \"false\") space", {}}},
+    {"decimal-part", {"[0-9]{1,16}", {}}},
+    {"integral-part", {"[0] | [1-9] [0-9]{0,15}", {}}},
+    {"number", {"(\"-\"? integral-part) (\".\" decimal-part)? ([eE] [-+]? integral-part)? space", {"integral-part", "decimal-part"}}},
+    {"integer", {"(\"-\"? integral-part) space", {"integral-part"}}},
+    {"value", {"object | array | string | number | boolean | null", {"object", "array", "string", "number", "boolean", "null"}}},
+    {"object", {"\"{\" space ( string \":\" space value (\",\" space string \":\" space value)* )? \"}\" space", {"string", "value"}}},
+    {"array", {"\"[\" space ( value (\",\" space value)* )? \"]\" space", {"value"}}},
+    {"uuid", {"\"\\\"\" [0-9a-fA-F]{8} \"-\" [0-9a-fA-F]{4} \"-\" [0-9a-fA-F]{4} \"-\" [0-9a-fA-F]{4} \"-\" [0-9a-fA-F]{12} \"\\\"\" space", {}}},
+    {"char",   {"[^\"\\\\\\x7F\\x00-\\x1F] | [\\\\] ([\"\\\\bfnrt] | \"u\" [0-9a-fA-F]{4})", {}}},
+    {"string", {"\"\\\"\" char* \"\\\"\" space", {"char"}}},
+    {"null", {"\"null\" space", {}}},
+};
+
+std::unordered_map<std::string, BuiltinRule> STRING_FORMAT_RULES = {
+    {"date", {"[0-9]{4} \"-\" ( \"0\" [1-9] | \"1\" [0-2] ) \"-\" ( \"0\" [1-9] | [1-2] [0-9] | \"3\" [0-1] )", {}}},
+    {"time", {"([01] [0-9] | \"2\" [0-3]) \":\" [0-5] [0-9] \":\" [0-5] [0-9] ( \".\" [0-9]{3} )? ( \"Z\" | ( \"+\" | \"-\" ) ( [01] [0-9] | \"2\" [0-3] ) \":\" [0-5] [0-9] )", {}}},
+    {"date-time", {"date \"T\" time", {"date", "time"}}},
+    {"date-string", {"\"\\\"\" date \"\\\"\" space", {"date"}}},
+    {"time-string", {"\"\\\"\" time \"\\\"\" space", {"time"}}},
+    {"date-time-string", {"\"\\\"\" date-time \"\\\"\" space", {"date-time"}}}
+};
+
+static bool is_reserved_name(const std::string & name) {
+    static std::unordered_set<std::string> RESERVED_NAMES;
+    if (RESERVED_NAMES.empty()) {
+        RESERVED_NAMES.insert("root");
+        for (const auto &p : PRIMITIVE_RULES) RESERVED_NAMES.insert(p.first);
+        for (const auto &p : STRING_FORMAT_RULES) RESERVED_NAMES.insert(p.first);
+    }
+    return RESERVED_NAMES.find(name) != RESERVED_NAMES.end();
+}
+
+std::regex INVALID_RULE_CHARS_RE("[^a-zA-Z0-9-]+");
+std::regex GRAMMAR_LITERAL_ESCAPE_RE("[\r\n\"]");
+std::regex GRAMMAR_RANGE_LITERAL_ESCAPE_RE("[\r\n\"\\]\\-\\\\]");
+std::unordered_map<char, std::string> GRAMMAR_LITERAL_ESCAPES = {
+    {'\r', "\\r"}, {'\n', "\\n"}, {'"', "\\\""}, {'-', "\\-"}, {']', "\\]"}
+};
+
+std::unordered_set<char> NON_LITERAL_SET = {'|', '.', '(', ')', '[', ']', '{', '}', '*', '+', '?'};
+std::unordered_set<char> ESCAPED_IN_REGEXPS_BUT_NOT_IN_LITERALS = {'^', '$', '.', '[', ']', '(', ')', '|', '{', '}', '*', '+', '?'};
+
+static std::string replacePattern(const std::string & input, const std::regex & regex, const std::function<std::string(const std::smatch &)> & replacement) {
+    std::smatch match;
+    std::string result;
+
+    std::string::const_iterator searchStart(input.cbegin());
+    std::string::const_iterator searchEnd(input.cend());
+
+    while (std::regex_search(searchStart, searchEnd, match, regex)) {
+        result.append(searchStart, searchStart + match.position());
+        result.append(replacement(match));
+        searchStart = match.suffix().first;
+    }
+
+    result.append(searchStart, searchEnd);
+
+    return result;
+}
+
+static std::string format_literal(const std::string & literal) {
+    std::string escaped = replacePattern(literal, GRAMMAR_LITERAL_ESCAPE_RE, [&](const std::smatch & match) {
+        char c = match.str()[0];
+        return GRAMMAR_LITERAL_ESCAPES.at(c);
+    });
+    return "\"" + escaped + "\"";
+}
+
+class SchemaConverter {
+private:
+    friend std::string build_grammar(const std::function<void(const common_grammar_builder &)> & cb, const common_grammar_options & options);
+    std::function<json(const std::string &)> _fetch_json;
+    bool _dotall;
+    std::unordered_map<std::string, std::string> _rules;
+    std::unordered_map<std::string, json> _refs;
+    std::unordered_set<std::string> _refs_being_resolved;
+    std::vector<std::string> _errors;
+    std::vector<std::string> _warnings;
+
+    std::string _add_rule(const std::string & name, const std::string & rule) {
+        std::string esc_name = regex_replace(name, INVALID_RULE_CHARS_RE, "-");
+        if (_rules.find(esc_name) == _rules.end() || _rules[esc_name] == rule) {
+            _rules[esc_name] = rule;
+            return esc_name;
+        } else {
+            int i = 0;
+            while (_rules.find(esc_name + std::to_string(i)) != _rules.end() && _rules[esc_name + std::to_string(i)] != rule) {
+                i++;
+            }
+            std::string key = esc_name + std::to_string(i);
+            _rules[key] = rule;
+            return key;
+        }
+    }
+
+    std::string _generate_union_rule(const std::string & name, const std::vector<json> & alt_schemas) {
+        std::vector<std::string> rules;
+        for (size_t i = 0; i < alt_schemas.size(); i++) {
+            rules.push_back(visit(alt_schemas[i], name + (name.empty() ? "alternative-" : "-") + std::to_string(i)));
+        }
+        return string_join(rules, " | ");
+    }
+
+    std::string _visit_pattern(const std::string & pattern, const std::string & name) {
+        if (!(pattern.front() == '^' && pattern.back() == '$')) {
+            _errors.push_back("Pattern must start with '^' and end with '$'");
+            return "";
+        }
+        std::string sub_pattern = pattern.substr(1, pattern.length() - 2);
+        std::unordered_map<std::string, std::string> sub_rule_ids;
+
+        size_t i = 0;
+        size_t length = sub_pattern.length();
+
+        using literal_or_rule = std::pair<std::string, bool>;
+        auto to_rule = [&](const literal_or_rule & ls) {
+            auto is_literal = ls.second;
+            auto s = ls.first;
+            return is_literal ? "\"" + s + "\"" : s;
+        };
+        std::function<literal_or_rule()> transform = [&]() -> literal_or_rule {
+            size_t start = i;
+            std::vector<literal_or_rule> seq;
+
+            auto get_dot = [&]() {
+                std::string rule;
+                if (_dotall) {
+                    rule = "[\\U00000000-\\U0010FFFF]";
+                } else {
+                    rule = "[^\\x0A\\x0D]";
+                }
+                return _add_rule("dot", rule);
+            };
+
+            // Joins the sequence, merging consecutive literals together.
+            auto join_seq = [&]() {
+                std::vector<literal_or_rule> ret;
+
+                std::string literal;
+                auto flush_literal = [&]() {
+                    if (literal.empty()) {
+                        return false;
+                    }
+                    ret.emplace_back(literal, true);
+                    literal.clear();
+                    return true;
+                };
+
+                for (const auto & item : seq) {
+                    auto is_literal = item.second;
+                    if (is_literal) {
+                        literal += item.first;
+                    } else {
+                        flush_literal();
+                        ret.push_back(item);
+                    }
+                }
+                flush_literal();
+
+                std::vector<std::string> results;
+                for (const auto & item : ret) {
+                    results.push_back(to_rule(item));
+                }
+                return std::make_pair(string_join(results, " "), false);
+            };
+
+            while (i < length) {
+                char c = sub_pattern[i];
+                if (c == '.') {
+                    seq.emplace_back(get_dot(), false);
+                    i++;
+                } else if (c == '(') {
+                    i++;
+                    if (i < length) {
+                        if (sub_pattern[i] == '?') {
+                            _warnings.push_back("Unsupported pattern syntax");
+                        }
+                    }
+                    seq.emplace_back("(" + to_rule(transform()) + ")", false);
+                } else if (c == ')') {
+                    i++;
+                    if (start > 0 && sub_pattern[start - 1] != '(') {
+                        _errors.push_back("Unbalanced parentheses");
+                    }
+                    return join_seq();
+                } else if (c == '[') {
+                    std::string square_brackets = std::string(1, c);
+                    i++;
+                    while (i < length && sub_pattern[i] != ']') {
+                        if (sub_pattern[i] == '\\') {
+                            square_brackets += sub_pattern.substr(i, 2);
+                            i += 2;
+                        } else {
+                            square_brackets += sub_pattern[i];
+                            i++;
+                        }
+                    }
+                    if (i >= length) {
+                        _errors.push_back("Unbalanced square brackets");
+                    }
+                    square_brackets += ']';
+                    i++;
+                    seq.emplace_back(square_brackets, false);
+                } else if (c == '|') {
+                    seq.emplace_back("|", false);
+                    i++;
+                } else if (c == '*' || c == '+' || c == '?') {
+                    seq.back() = std::make_pair(to_rule(seq.back()) + c, false);
+                    i++;
+                } else if (c == '{') {
+                    std::string curly_brackets = std::string(1, c);
+                    i++;
+                    while (i < length && sub_pattern[i] != '}') {
+                        curly_brackets += sub_pattern[i];
+                        i++;
+                    }
+                    if (i >= length) {
+                        _errors.push_back("Unbalanced curly brackets");
+                    }
+                    curly_brackets += '}';
+                    i++;
+                    auto nums = string_split(curly_brackets.substr(1, curly_brackets.length() - 2), ",");
+                    int min_times = 0;
+                    int max_times = std::numeric_limits<int>::max();
+                    try {
+                        if (nums.size() == 1) {
+                            min_times = max_times = std::stoi(nums[0]);
+                        } else if (nums.size() != 2) {
+                            _errors.push_back("Wrong number of values in curly brackets");
+                        } else {
+                            if (!nums[0].empty()) {
+                                min_times = std::stoi(nums[0]);
+                            }
+                            if (!nums[1].empty()) {
+                                max_times = std::stoi(nums[1]);
+                            }
+                        }
+                    } catch (const std::invalid_argument & e) {
+                        _errors.push_back("Invalid number in curly brackets");
+                        return std::make_pair("", false);
+                    }
+                    auto &last = seq.back();
+                    auto &sub = last.first;
+                    auto sub_is_literal = last.second;
+
+                    if (!sub_is_literal) {
+                        std::string & sub_id = sub_rule_ids[sub];
+                        if (sub_id.empty()) {
+                            sub_id = _add_rule(name + "-" + std::to_string(sub_rule_ids.size()), sub);
+                        }
+                        sub = sub_id;
+                    }
+                    seq.back().first = build_repetition(
+                        sub_is_literal ? "\"" + sub + "\"" : sub,
+                        min_times,
+                        max_times,
+                        ""
+                    );
+                    seq.back().second = false;
+                } else {
+                    std::string literal;
+                    auto is_non_literal = [&](char c) {
+                        return NON_LITERAL_SET.find(c) != NON_LITERAL_SET.end();
+                    };
+                    while (i < length) {
+                        if (sub_pattern[i] == '\\' && i < length - 1) {
+                            char next = sub_pattern[i + 1];
+                            if (ESCAPED_IN_REGEXPS_BUT_NOT_IN_LITERALS.find(next) != ESCAPED_IN_REGEXPS_BUT_NOT_IN_LITERALS.end()) {
+                                i++;
+                                literal += sub_pattern[i];
+                                i++;
+                            } else {
+                                literal += sub_pattern.substr(i, 2);
+                                i += 2;
+                            }
+                        } else if (sub_pattern[i] == '"') {
+                            literal += "\\\"";
+                            i++;
+                        } else if (!is_non_literal(sub_pattern[i]) &&
+                                (i == length - 1 || literal.empty() || sub_pattern[i + 1] == '.' || !is_non_literal(sub_pattern[i + 1]))) {
+                            literal += sub_pattern[i];
+                            i++;
+                        } else {
+                            break;
+                        }
+                    }
+                    if (!literal.empty()) {
+                        seq.emplace_back(literal, true);
+                    }
+                }
+            }
+            return join_seq();
+        };
+        return _add_rule(name, "\"\\\"\" (" + to_rule(transform()) + ") \"\\\"\" space");
+    }
+
+    /*
+        Returns a rule that matches a JSON string that is none of the provided strings
+
+        not_strings({"a"})
+            -> ["] ( [a] char+ | [^"a] char* )? ["] space
+        not_strings({"and", "also"})
+            -> ["] ( [a] ([l] ([s] ([o] char+ | [^"o] char*) | [^"s] char*) | [n] ([d] char+ | [^"d] char*) | [^"ln] char*) | [^"a] char* )? ["] space
+    */
+    std::string _not_strings(const std::vector<std::string> & strings) {
+
+        struct TrieNode {
+            std::map<char, TrieNode> children;
+            bool is_end_of_string;
+
+            TrieNode() : is_end_of_string(false) {}
+
+            void insert(const std::string & string) {
+                auto node = this;
+                for (char c : string) {
+                    node = &node->children[c];
+                }
+                node->is_end_of_string = true;
+            }
+        };
+
+        TrieNode trie;
+        for (const auto & s : strings) {
+            trie.insert(s);
+        }
+
+        std::string char_rule = _add_primitive("char", PRIMITIVE_RULES.at("char"));
+        std::ostringstream out;
+        out << "[\"] ( ";
+        std::function<void(const TrieNode &)> visit = [&](const TrieNode & node) {
+            std::ostringstream rejects;
+            auto first = true;
+            for (const auto & kv : node.children) {
+                rejects << kv.first;
+                if (first) {
+                    first = false;
+                } else {
+                    out << " | ";
+                }
+                out << "[" << kv.first << "]";
+                if (!kv.second.children.empty()) {
+                    out << " (";
+                    visit(kv.second);
+                    out << ")";
+                } else if (kv.second.is_end_of_string) {
+                    out << " " << char_rule << "+";
+                }
+            }
+            if (!node.children.empty()) {
+                if (!first) {
+                    out << " | ";
+                }
+                out << "[^\"" << rejects.str() << "] " << char_rule << "*";
+            }
+        };
+        visit(trie);
+
+        out << " )";
+        if (!trie.is_end_of_string) {
+            out << "?";
+        }
+        out << " [\"] space";
+        return out.str();
+    }
+
+    std::string _resolve_ref(const std::string & ref) {
+        std::string ref_name = ref.substr(ref.find_last_of('/') + 1);
+        if (_rules.find(ref_name) == _rules.end() && _refs_being_resolved.find(ref) == _refs_being_resolved.end()) {
+            _refs_being_resolved.insert(ref);
+            json resolved = _refs[ref];
+            ref_name = visit(resolved, ref_name);
+            _refs_being_resolved.erase(ref);
+        }
+        return ref_name;
+    }
+
+    std::string _build_object_rule(
+        const std::vector<std::pair<std::string, json>> & properties,
+        const std::unordered_set<std::string> & required,
+        const std::string & name,
+        const json & additional_properties)
+    {
+        std::vector<std::string> required_props;
+        std::vector<std::string> optional_props;
+        std::unordered_map<std::string, std::string> prop_kv_rule_names;
+        std::vector<std::string> prop_names;
+        for (const auto & kv : properties) {
+            const auto &prop_name = kv.first;
+            const auto &prop_schema = kv.second;
+
+            std::string prop_rule_name = visit(prop_schema, name + (name.empty() ? "" : "-") + prop_name);
+            prop_kv_rule_names[prop_name] = _add_rule(
+                name + (name.empty() ? "" : "-") + prop_name + "-kv",
+                format_literal(json(prop_name).dump()) + " space \":\" space " + prop_rule_name
+            );
+            if (required.find(prop_name) != required.end()) {
+                required_props.push_back(prop_name);
+            } else {
+                optional_props.push_back(prop_name);
+            }
+            prop_names.push_back(prop_name);
+        }
+        if ((additional_properties.is_boolean() && additional_properties.get<bool>()) || additional_properties.is_object()) {
+            std::string sub_name = name + (name.empty() ? "" : "-") + "additional";
+            std::string value_rule =
+                additional_properties.is_object() ? visit(additional_properties, sub_name + "-value")
+                : _add_primitive("value", PRIMITIVE_RULES.at("value"));
+
+            auto key_rule =
+                prop_names.empty() ? _add_primitive("string", PRIMITIVE_RULES.at("string"))
+                : _add_rule(sub_name + "-k", _not_strings(prop_names));
+            std::string kv_rule = _add_rule(sub_name + "-kv", key_rule + " \":\" space " + value_rule);
+            prop_kv_rule_names["*"] = kv_rule;
+            optional_props.push_back("*");
+        }
+
+        std::string rule = "\"{\" space ";
+        for (size_t i = 0; i < required_props.size(); i++) {
+            if (i > 0) {
+                rule += " \",\" space ";
+            }
+            rule += prop_kv_rule_names[required_props[i]];
+        }
+
+        if (!optional_props.empty()) {
+            rule += " (";
+            if (!required_props.empty()) {
+                rule += " \",\" space ( ";
+            }
+
+            std::function<std::string(const std::vector<std::string> &, bool)> get_recursive_refs = [&](const std::vector<std::string> & ks, bool first_is_optional) {
+                std::string res;
+                if (ks.empty()) {
+                    return res;
+                }
+                std::string k = ks[0];
+                std::string kv_rule_name = prop_kv_rule_names[k];
+                std::string comma_ref = "( \",\" space " + kv_rule_name + " )";
+                if (first_is_optional) {
+                    res = comma_ref + (k == "*" ? "*" : "?");
+                } else {
+                    res = kv_rule_name + (k == "*" ? " " + comma_ref + "*" : "");
+                }
+                if (ks.size() > 1) {
+                    res += " " + _add_rule(
+                        name + (name.empty() ? "" : "-") + k + "-rest",
+                        get_recursive_refs(std::vector<std::string>(ks.begin() + 1, ks.end()), true)
+                    );
+                }
+                return res;
+            };
+
+            for (size_t i = 0; i < optional_props.size(); i++) {
+                if (i > 0) {
+                    rule += " | ";
+                }
+                rule += get_recursive_refs(std::vector<std::string>(optional_props.begin() + i, optional_props.end()), false);
+            }
+            if (!required_props.empty()) {
+                rule += " )";
+            }
+            rule += " )?";
+        }
+
+        rule += " \"}\" space";
+
+        return rule;
+    }
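+
+    // Example (hypothetical schema): with one required property "a" and one
+    // optional property "b", the rule assembled above is roughly
+    //   "{" space a-kv ( "," space ( b-kv ) )? "}" space
+    // required k/v rules are comma-joined first, then each optional k/v rule is
+    // offered inside a trailing, comma-prefixed optional group.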
+
+    std::string _add_primitive(const std::string & name, const BuiltinRule & rule) {
+        auto n = _add_rule(name, rule.content);
+        for (const auto & dep : rule.deps) {
+            BuiltinRule dep_rule;
+            auto it = PRIMITIVE_RULES.find(dep);
+            if (it == PRIMITIVE_RULES.end()) {
+                it = STRING_FORMAT_RULES.find(dep);
+                if (it == STRING_FORMAT_RULES.end()) {
+                    _errors.push_back("Rule " + dep + " not known");
+                    continue;
+                }
+            }
+            if (_rules.find(dep) == _rules.end()) {
+                _add_primitive(dep, it->second);
+            }
+        }
+        return n;
+    }
+
+public:
+    SchemaConverter(
+        const std::function<json(const std::string &)> & fetch_json,
+        bool dotall)
+          : _fetch_json(fetch_json), _dotall(dotall)
+    {
+        _rules["space"] = SPACE_RULE;
+    }
+
+    void resolve_refs(json & schema, const std::string & url) {
+        /*
+        * Resolves all $ref fields in the given schema, fetching any remote schemas,
+        * replacing each $ref with absolute reference URL and populates _refs with the
+        * respective referenced (sub)schema dictionaries.
+        */
+        std::function<void(json &)> visit_refs = [&](json & n) {
+            if (n.is_array()) {
+                for (auto & x : n) {
+                    visit_refs(x);
+                }
+            } else if (n.is_object()) {
+                if (n.contains("$ref")) {
+                    std::string ref = n["$ref"];
+                    if (_refs.find(ref) == _refs.end()) {
+                        json target;
+                        if (ref.find("https://") == 0) {
+                            std::string base_url = ref.substr(0, ref.find('#'));
+                            auto it = _refs.find(base_url);
+                            if (it != _refs.end()) {
+                                target = it->second;
+                            } else {
+                                // Fetch the referenced schema and resolve its refs
+                                auto referenced = _fetch_json(ref);
+                                resolve_refs(referenced, base_url);
+                                _refs[base_url] = referenced;
+                            }
+                            if (ref.find('#') == std::string::npos || ref.substr(ref.find('#') + 1).empty()) {
+                                return;
+                            }
+                        } else if (ref.find("#/") == 0) {
+                            target = schema;
+                            n["$ref"] = url + ref;
+                            ref = url + ref;
+                        } else {
+                            _errors.push_back("Unsupported ref: " + ref);
+                            return;
+                        }
+                        std::string pointer = ref.substr(ref.find('#') + 1);
+                        std::vector<std::string> tokens = string_split(pointer, "/");
+                        for (size_t i = 1; i < tokens.size(); ++i) {
+                            std::string sel = tokens[i];
+                            if (target.is_null() || !target.contains(sel)) {
+                                _errors.push_back("Error resolving ref " + ref + ": " + sel + " not in " + target.dump());
+                                return;
+                            }
+                            target = target[sel];
+                        }
+                        _refs[ref] = target;
+                    }
+                } else {
+                    for (auto & kv : n.items()) {
+                        visit_refs(kv.value());
+                    }
+                }
+            }
+        };
+
+        visit_refs(schema);
+    }
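+
+    // Example (hypothetical schema): a local reference such as
+    //   { "$ref": "#/definitions/name" }
+    // is rewritten to an absolute form rooted at the supplied url, and the
+    // subschema found at /definitions/name is cached in _refs so that
+    // _resolve_ref() can later emit a rule for it without re-walking the
+    // document or re-fetching remote schemas.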
+
+    std::string _generate_constant_rule(const json & value) {
+        return format_literal(value.dump());
+    }
+
+    std::string visit(const json & schema, const std::string & name) {
+        json schema_type = schema.contains("type") ? schema["type"] : json();
+        std::string schema_format = schema.contains("format") ? schema["format"].get() : "";
+        std::string rule_name = is_reserved_name(name) ? name + "-" : name.empty() ? "root" : name;
+
+        if (schema.contains("$ref")) {
+            return _add_rule(rule_name, _resolve_ref(schema["$ref"]));
+        } else if (schema.contains("oneOf") || schema.contains("anyOf")) {
+            std::vector alt_schemas = schema.contains("oneOf") ? schema["oneOf"].get>() : schema["anyOf"].get>();
+            return _add_rule(rule_name, _generate_union_rule(name, alt_schemas));
+        } else if (schema_type.is_array()) {
+            std::vector<json> schema_types;
+            for (const auto & t : schema_type) {
+                json schema_copy(schema);
+                schema_copy["type"] = t;
+                schema_types.push_back(schema_copy);
+            }
+            return _add_rule(rule_name, _generate_union_rule(name, schema_types));
+        } else if (schema.contains("const")) {
+            return _add_rule(rule_name, _generate_constant_rule(schema["const"]) + " space");
+        } else if (schema.contains("enum")) {
+            std::vector<std::string> enum_values;
+            for (const auto & v : schema["enum"]) {
+                enum_values.push_back(_generate_constant_rule(v));
+            }
+            return _add_rule(rule_name, "(" + string_join(enum_values, " | ") + ") space");
+        } else if ((schema_type.is_null() || schema_type == "object")
+                && (schema.contains("properties") ||
+                    (schema.contains("additionalProperties") && schema["additionalProperties"] != true))) {
+            std::unordered_set<std::string> required;
+            if (schema.contains("required") && schema["required"].is_array()) {
+                for (const auto & item : schema["required"]) {
+                    if (item.is_string()) {
+                        required.insert(item.get<std::string>());
+                    }
+                }
+            }
+            std::vector<std::pair<std::string, json>> properties;
+            if (schema.contains("properties")) {
+                for (const auto & prop : schema["properties"].items()) {
+                    properties.emplace_back(prop.key(), prop.value());
+                }
+            }
+            return _add_rule(rule_name,
+                _build_object_rule(
+                    properties, required, name,
+                    schema.contains("additionalProperties") ? schema["additionalProperties"] : json()));
+        } else if ((schema_type.is_null() || schema_type == "object") && schema.contains("allOf")) {
+            std::unordered_set<std::string> required;
+            std::vector<std::pair<std::string, json>> properties;
+            std::string hybrid_name = name;
+            std::function<void(const json &, bool)> add_component = [&](const json & comp_schema, bool is_required) {
+                if (comp_schema.contains("$ref")) {
+                    add_component(_refs[comp_schema["$ref"]], is_required);
+                } else if (comp_schema.contains("properties")) {
+                    for (const auto & prop : comp_schema["properties"].items()) {
+                        properties.emplace_back(prop.key(), prop.value());
+                        if (is_required) {
+                            required.insert(prop.key());
+                        }
+                    }
+                } else {
+                  // todo warning
+                }
+            };
+            for (auto & t : schema["allOf"]) {
+                if (t.contains("anyOf")) {
+                    for (auto & tt : t["anyOf"]) {
+                        add_component(tt, false);
+                    }
+                } else {
+                    add_component(t, true);
+                }
+            }
+            return _add_rule(rule_name, _build_object_rule(properties, required, hybrid_name, json()));
+        } else if ((schema_type.is_null() || schema_type == "array") && (schema.contains("items") || schema.contains("prefixItems"))) {
+            json items = schema.contains("items") ? schema["items"] : schema["prefixItems"];
+            if (items.is_array()) {
+                std::string rule = "\"[\" space ";
+                for (size_t i = 0; i < items.size(); i++) {
+                    if (i > 0) {
+                        rule += " \",\" space ";
+                    }
+                    rule += visit(items[i], name + (name.empty() ? "" : "-") + "tuple-" + std::to_string(i));
+                }
+                rule += " \"]\" space";
+                return _add_rule(rule_name, rule);
+            } else {
+                std::string item_rule_name = visit(items, name + (name.empty() ? "" : "-") + "item");
+                int min_items = schema.contains("minItems") ? schema["minItems"].get() : 0;
+                json max_items_json = schema.contains("maxItems") ? schema["maxItems"] : json();
+                int max_items = max_items_json.is_number_integer() ? max_items_json.get<int>() : std::numeric_limits<int>::max();
+
+                return _add_rule(rule_name, "\"[\" space " + build_repetition(item_rule_name, min_items, max_items, "\",\" space") + " \"]\" space");
+            }
+        } else if ((schema_type.is_null() || schema_type == "string") && schema.contains("pattern")) {
+            return _visit_pattern(schema["pattern"], rule_name);
+        } else if ((schema_type.is_null() || schema_type == "string") && std::regex_match(schema_format, std::regex("^uuid[1-5]?$"))) {
+            return _add_primitive(rule_name == "root" ? "root" : schema_format, PRIMITIVE_RULES.at("uuid"));
+        } else if ((schema_type.is_null() || schema_type == "string") && STRING_FORMAT_RULES.find(schema_format + "-string") != STRING_FORMAT_RULES.end()) {
+            auto prim_name = schema_format + "-string";
+            return _add_rule(rule_name, _add_primitive(prim_name, STRING_FORMAT_RULES.at(prim_name)));
+        } else if (schema_type == "string" && (schema.contains("minLength") || schema.contains("maxLength"))) {
+            std::string char_rule = _add_primitive("char", PRIMITIVE_RULES.at("char"));
+            int min_len = schema.contains("minLength") ? schema["minLength"].get() : 0;
+            int max_len = schema.contains("maxLength") ? schema["maxLength"].get() : std::numeric_limits::max();
+            return _add_rule(rule_name, "\"\\\"\" " + build_repetition(char_rule, min_len, max_len) + " \"\\\"\" space");
+        } else if (schema_type == "integer" && (schema.contains("minimum") || schema.contains("exclusiveMinimum") || schema.contains("maximum") || schema.contains("exclusiveMaximum"))) {
+            int min_value = std::numeric_limits<int>::min();
+            int max_value = std::numeric_limits<int>::max();
+            if (schema.contains("minimum")) {
+                min_value = schema["minimum"].get();
+            } else if (schema.contains("exclusiveMinimum")) {
+                min_value = schema["exclusiveMinimum"].get() + 1;
+            }
+            if (schema.contains("maximum")) {
+                max_value = schema["maximum"].get();
+            } else if (schema.contains("exclusiveMaximum")) {
+                max_value = schema["exclusiveMaximum"].get() - 1;
+            }
+            std::stringstream out;
+            out << "(";
+            _build_min_max_int(min_value, max_value, out);
+            out << ") space";
+            return _add_rule(rule_name, out.str());
+        } else if (schema.empty() || schema_type == "object") {
+            return _add_rule(rule_name, _add_primitive("object", PRIMITIVE_RULES.at("object")));
+        } else {
+            if (!schema_type.is_string() || PRIMITIVE_RULES.find(schema_type.get<std::string>()) == PRIMITIVE_RULES.end()) {
+                _errors.push_back("Unrecognized schema: " + schema.dump());
+                return "";
+            }
+            // TODO: support minimum, maximum, exclusiveMinimum, exclusiveMaximum at least for zero
+            return _add_primitive(rule_name == "root" ? "root" : schema_type.get(), PRIMITIVE_RULES.at(schema_type.get()));
+        }
+    }
+
+    void check_errors() {
+        if (!_errors.empty()) {
+            throw std::runtime_error("JSON schema conversion failed:\n" + string_join(_errors, "\n"));
+        }
+        if (!_warnings.empty()) {
+            fprintf(stderr, "WARNING: JSON schema conversion was incomplete: %s\n", string_join(_warnings, "; ").c_str());
+        }
+    }
+
+    std::string format_grammar() {
+        std::stringstream ss;
+        for (const auto & kv : _rules) {
+            ss << kv.first << " ::= " << kv.second << std::endl;
+        }
+        return ss.str();
+    }
+};
+
+std::string json_schema_to_grammar(const json & schema, bool force_gbnf) {
+#ifdef LLAMA_USE_LLGUIDANCE
+    if (!force_gbnf) {
+        return "%llguidance {}\nstart: %json " + schema.dump();
+    }
+#else
+    (void)force_gbnf;
+#endif // LLAMA_USE_LLGUIDANCE
+    return build_grammar([&](const common_grammar_builder & callbacks) {
+        auto copy = schema;
+        callbacks.resolve_refs(copy);
+        callbacks.add_schema("", copy);
+    });
+}
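+
+// Usage sketch (illustrative only; the schema shape and helper name below are
+// assumptions, not part of the public API): converts a tiny object schema into
+// a GBNF grammar string. Passing force_gbnf = true skips the llguidance
+// shortcut when that backend is compiled in.
+static inline std::string example_city_grammar() {
+    json schema = json::parse(R"({
+        "type": "object",
+        "properties": { "city": { "type": "string" } },
+        "required": ["city"]
+    })");
+    return json_schema_to_grammar(schema, /* force_gbnf = */ true);
+}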
+
+std::string build_grammar(const std::function<void(const common_grammar_builder &)> & cb, const common_grammar_options & options) {
+    SchemaConverter converter([&](const std::string &) { return json(); }, options.dotall);
+    common_grammar_builder builder {
+        /* .add_rule = */ [&](const std::string & name, const std::string & rule) {
+            return converter._add_rule(name, rule);
+        },
+        /* .add_schema = */ [&](const std::string & name, const nlohmann::ordered_json & schema) {
+            return converter.visit(schema, name == "root" ? "" : name);
+        },
+        /* .resolve_refs = */ [&](nlohmann::ordered_json & schema) {
+            converter.resolve_refs(schema, "");
+        }
+    };
+    cb(builder);
+    converter.check_errors();
+    return converter.format_grammar();
+}
diff --git a/llama/llama.cpp/common/json-schema-to-grammar.h b/llama/llama.cpp/common/json-schema-to-grammar.h
new file mode 100644
index 0000000..4613f5d
--- /dev/null
+++ b/llama/llama.cpp/common/json-schema-to-grammar.h
@@ -0,0 +1,21 @@
+#pragma once
+
+#include "ggml.h"
+// Change JSON_ASSERT from assert() to GGML_ASSERT:
+#define JSON_ASSERT GGML_ASSERT
+#include "json.hpp"
+
+std::string json_schema_to_grammar(const nlohmann::ordered_json & schema,
+                                   bool force_gbnf = false);
+
+struct common_grammar_builder {
+    std::function<std::string(const std::string &, const std::string &)> add_rule;
+    std::function<std::string(const std::string &, const nlohmann::ordered_json &)> add_schema;
+    std::function<void(nlohmann::ordered_json &)> resolve_refs;
+};
+
+struct common_grammar_options {
+    bool dotall = false;
+};
+
+std::string build_grammar(const std::function<void(const common_grammar_builder &)> & cb, const common_grammar_options & options = {});
diff --git a/llama/llama.cpp/common/json.hpp b/llama/llama.cpp/common/json.hpp
new file mode 100644
index 0000000..a858728
--- /dev/null
+++ b/llama/llama.cpp/common/json.hpp
@@ -0,0 +1,24766 @@
+//     __ _____ _____ _____
+//  __|  |   __|     |   | |  JSON for Modern C++
+// |  |  |__   |  |  | | | |  version 3.11.3
+// |_____|_____|_____|_|___|  https://github.com/nlohmann/json
+//
+// SPDX-FileCopyrightText: 2013-2023 Niels Lohmann 
+// SPDX-License-Identifier: MIT
+
+/****************************************************************************\
+ * Note on documentation: The source files contain links to the online      *
+ * documentation of the public API at https://json.nlohmann.me. This URL    *
+ * contains the most recent documentation and should also be applicable to  *
+ * previous versions; documentation for deprecated functions is not         *
+ * removed, but marked deprecated. See "Generate documentation" section in  *
+ * file docs/README.md.                                                     *
+\****************************************************************************/
+
+#ifndef INCLUDE_NLOHMANN_JSON_HPP_
+#define INCLUDE_NLOHMANN_JSON_HPP_
+
+#include <algorithm> // all_of, find, for_each
+#include <cstddef> // nullptr_t, ptrdiff_t, size_t
+#include <functional> // hash, less
+#include <initializer_list> // initializer_list
+#ifndef JSON_NO_IO
+    #include <iosfwd> // istream, ostream
+#endif  // JSON_NO_IO
+#include <iterator> // random_access_iterator_tag
+#include <memory> // unique_ptr
+#include <string> // string, stoi, to_string
+#include <utility> // declval, forward, move, pair, swap
+#include <vector> // vector
+
+// #include 
+//     __ _____ _____ _____
+//  __|  |   __|     |   | |  JSON for Modern C++
+// |  |  |__   |  |  | | | |  version 3.11.3
+// |_____|_____|_____|_|___|  https://github.com/nlohmann/json
+//
+// SPDX-FileCopyrightText: 2013-2023 Niels Lohmann 
+// SPDX-License-Identifier: MIT
+
+
+
+#include <utility>
+
+// #include 
+//     __ _____ _____ _____
+//  __|  |   __|     |   | |  JSON for Modern C++
+// |  |  |__   |  |  | | | |  version 3.11.3
+// |_____|_____|_____|_|___|  https://github.com/nlohmann/json
+//
+// SPDX-FileCopyrightText: 2013-2023 Niels Lohmann 
+// SPDX-License-Identifier: MIT
+
+
+
+// This file contains all macro definitions affecting or depending on the ABI
+
+#ifndef JSON_SKIP_LIBRARY_VERSION_CHECK
+    #if defined(NLOHMANN_JSON_VERSION_MAJOR) && defined(NLOHMANN_JSON_VERSION_MINOR) && defined(NLOHMANN_JSON_VERSION_PATCH)
+        #if NLOHMANN_JSON_VERSION_MAJOR != 3 || NLOHMANN_JSON_VERSION_MINOR != 11 || NLOHMANN_JSON_VERSION_PATCH != 3
+            #warning "Already included a different version of the library!"
+        #endif
+    #endif
+#endif
+
+#define NLOHMANN_JSON_VERSION_MAJOR 3   // NOLINT(modernize-macro-to-enum)
+#define NLOHMANN_JSON_VERSION_MINOR 11  // NOLINT(modernize-macro-to-enum)
+#define NLOHMANN_JSON_VERSION_PATCH 3   // NOLINT(modernize-macro-to-enum)
+
+#ifndef JSON_DIAGNOSTICS
+    #define JSON_DIAGNOSTICS 0
+#endif
+
+#ifndef JSON_USE_LEGACY_DISCARDED_VALUE_COMPARISON
+    #define JSON_USE_LEGACY_DISCARDED_VALUE_COMPARISON 0
+#endif
+
+#if JSON_DIAGNOSTICS
+    #define NLOHMANN_JSON_ABI_TAG_DIAGNOSTICS _diag
+#else
+    #define NLOHMANN_JSON_ABI_TAG_DIAGNOSTICS
+#endif
+
+#if JSON_USE_LEGACY_DISCARDED_VALUE_COMPARISON
+    #define NLOHMANN_JSON_ABI_TAG_LEGACY_DISCARDED_VALUE_COMPARISON _ldvcmp
+#else
+    #define NLOHMANN_JSON_ABI_TAG_LEGACY_DISCARDED_VALUE_COMPARISON
+#endif
+
+#ifndef NLOHMANN_JSON_NAMESPACE_NO_VERSION
+    #define NLOHMANN_JSON_NAMESPACE_NO_VERSION 0
+#endif
+
+// Construct the namespace ABI tags component
+#define NLOHMANN_JSON_ABI_TAGS_CONCAT_EX(a, b) json_abi ## a ## b
+#define NLOHMANN_JSON_ABI_TAGS_CONCAT(a, b) \
+    NLOHMANN_JSON_ABI_TAGS_CONCAT_EX(a, b)
+
+#define NLOHMANN_JSON_ABI_TAGS                                       \
+    NLOHMANN_JSON_ABI_TAGS_CONCAT(                                   \
+            NLOHMANN_JSON_ABI_TAG_DIAGNOSTICS,                       \
+            NLOHMANN_JSON_ABI_TAG_LEGACY_DISCARDED_VALUE_COMPARISON)
+
+// Construct the namespace version component
+#define NLOHMANN_JSON_NAMESPACE_VERSION_CONCAT_EX(major, minor, patch) \
+    _v ## major ## _ ## minor ## _ ## patch
+#define NLOHMANN_JSON_NAMESPACE_VERSION_CONCAT(major, minor, patch) \
+    NLOHMANN_JSON_NAMESPACE_VERSION_CONCAT_EX(major, minor, patch)
+
+#if NLOHMANN_JSON_NAMESPACE_NO_VERSION
+#define NLOHMANN_JSON_NAMESPACE_VERSION
+#else
+#define NLOHMANN_JSON_NAMESPACE_VERSION                                 \
+    NLOHMANN_JSON_NAMESPACE_VERSION_CONCAT(NLOHMANN_JSON_VERSION_MAJOR, \
+                                           NLOHMANN_JSON_VERSION_MINOR, \
+                                           NLOHMANN_JSON_VERSION_PATCH)
+#endif
+
+// Combine namespace components
+#define NLOHMANN_JSON_NAMESPACE_CONCAT_EX(a, b) a ## b
+#define NLOHMANN_JSON_NAMESPACE_CONCAT(a, b) \
+    NLOHMANN_JSON_NAMESPACE_CONCAT_EX(a, b)
+
+#ifndef NLOHMANN_JSON_NAMESPACE
+#define NLOHMANN_JSON_NAMESPACE               \
+    nlohmann::NLOHMANN_JSON_NAMESPACE_CONCAT( \
+            NLOHMANN_JSON_ABI_TAGS,           \
+            NLOHMANN_JSON_NAMESPACE_VERSION)
+#endif
+
+#ifndef NLOHMANN_JSON_NAMESPACE_BEGIN
+#define NLOHMANN_JSON_NAMESPACE_BEGIN                \
+    namespace nlohmann                               \
+    {                                                \
+    inline namespace NLOHMANN_JSON_NAMESPACE_CONCAT( \
+                NLOHMANN_JSON_ABI_TAGS,              \
+                NLOHMANN_JSON_NAMESPACE_VERSION)     \
+    {
+#endif
+
+#ifndef NLOHMANN_JSON_NAMESPACE_END
+#define NLOHMANN_JSON_NAMESPACE_END                                     \
+    }  /* namespace (inline namespace) NOLINT(readability/namespace) */ \
+    }  // namespace nlohmann
+#endif
+
+// #include 
+//     __ _____ _____ _____
+//  __|  |   __|     |   | |  JSON for Modern C++
+// |  |  |__   |  |  | | | |  version 3.11.3
+// |_____|_____|_____|_|___|  https://github.com/nlohmann/json
+//
+// SPDX-FileCopyrightText: 2013-2023 Niels Lohmann 
+// SPDX-License-Identifier: MIT
+
+
+
+#include <algorithm> // transform
+#include <array> // array
+#include <forward_list> // forward_list
+#include <iterator> // inserter, front_inserter, end
+#include <map> // map
+#include <string> // string
+#include <tuple> // tuple, make_tuple
+#include <type_traits> // is_arithmetic, is_same, is_enum, underlying_type, is_convertible
+#include <unordered_map> // unordered_map
+#include <utility> // pair, declval
+#include <valarray> // valarray
+
+// #include 
+//     __ _____ _____ _____
+//  __|  |   __|     |   | |  JSON for Modern C++
+// |  |  |__   |  |  | | | |  version 3.11.3
+// |_____|_____|_____|_|___|  https://github.com/nlohmann/json
+//
+// SPDX-FileCopyrightText: 2013-2023 Niels Lohmann 
+// SPDX-License-Identifier: MIT
+
+
+
+#include <cstddef> // nullptr_t
+#include <exception> // exception
+#if JSON_DIAGNOSTICS
+    #include <numeric> // accumulate
+#endif
+#include <stdexcept> // runtime_error
+#include <string> // to_string
+#include <vector> // vector
+
+// #include 
+//     __ _____ _____ _____
+//  __|  |   __|     |   | |  JSON for Modern C++
+// |  |  |__   |  |  | | | |  version 3.11.3
+// |_____|_____|_____|_|___|  https://github.com/nlohmann/json
+//
+// SPDX-FileCopyrightText: 2013-2023 Niels Lohmann 
+// SPDX-License-Identifier: MIT
+
+
+
+#include <array> // array
+#include <cstddef> // size_t
+#include <cstdint> // uint8_t
+#include <string> // string
+
+// #include 
+//     __ _____ _____ _____
+//  __|  |   __|     |   | |  JSON for Modern C++
+// |  |  |__   |  |  | | | |  version 3.11.3
+// |_____|_____|_____|_|___|  https://github.com/nlohmann/json
+//
+// SPDX-FileCopyrightText: 2013-2023 Niels Lohmann 
+// SPDX-License-Identifier: MIT
+
+
+
+#include <utility> // declval, pair
+// #include 
+//     __ _____ _____ _____
+//  __|  |   __|     |   | |  JSON for Modern C++
+// |  |  |__   |  |  | | | |  version 3.11.3
+// |_____|_____|_____|_|___|  https://github.com/nlohmann/json
+//
+// SPDX-FileCopyrightText: 2013-2023 Niels Lohmann 
+// SPDX-License-Identifier: MIT
+
+
+
+#include <type_traits>
+
+// #include 
+//     __ _____ _____ _____
+//  __|  |   __|     |   | |  JSON for Modern C++
+// |  |  |__   |  |  | | | |  version 3.11.3
+// |_____|_____|_____|_|___|  https://github.com/nlohmann/json
+//
+// SPDX-FileCopyrightText: 2013-2023 Niels Lohmann 
+// SPDX-License-Identifier: MIT
+
+
+
+// #include 
+
+
+NLOHMANN_JSON_NAMESPACE_BEGIN
+namespace detail
+{
+
+template<typename ...Ts> struct make_void
+{
+    using type = void;
+};
+template<typename ...Ts> using void_t = typename make_void<Ts...>::type;
+
+}  // namespace detail
+NLOHMANN_JSON_NAMESPACE_END
+
+
+NLOHMANN_JSON_NAMESPACE_BEGIN
+namespace detail
+{
+
+// https://en.cppreference.com/w/cpp/experimental/is_detected
+struct nonesuch
+{
+    nonesuch() = delete;
+    ~nonesuch() = delete;
+    nonesuch(nonesuch const&) = delete;
+    nonesuch(nonesuch const&&) = delete;
+    void operator=(nonesuch const&) = delete;
+    void operator=(nonesuch&&) = delete;
+};
+
+template<class Default,
+         class AlwaysVoid,
+         template<class...> class Op,
+         class... Args>
+struct detector
+{
+    using value_t = std::false_type;
+    using type = Default;
+};
+
+template<class Default, template<class...> class Op, class... Args>
+struct detector<Default, void_t<Op<Args...>>, Op, Args...>
+{
+    using value_t = std::true_type;
+    using type = Op<Args...>;
+};
+
+template<template<class...> class Op, class... Args>
+using is_detected = typename detector<nonesuch, void, Op, Args...>::value_t;
+
+template<template<class...> class Op, class... Args>
+struct is_detected_lazy : is_detected<Op, Args...> { };
+
+template<template<class...> class Op, class... Args>
+using detected_t = typename detector<nonesuch, void, Op, Args...>::type;
+
+template<class Default, template<class...> class Op, class... Args>
+using detected_or = detector<Default, void, Op, Args...>;
+
+template<class Default, template<class...> class Op, class... Args>
+using detected_or_t = typename detected_or<Default, Op, Args...>::type;
+
+template<class Expected, template<class...> class Op, class... Args>
+using is_detected_exact = std::is_same<Expected, detected_t<Op, Args...>>;
+
+template<class To, template<class...> class Op, class... Args>
+using is_detected_convertible =
+    std::is_convertible<detected_t<Op, Args...>, To>;
+
+}  // namespace detail
+NLOHMANN_JSON_NAMESPACE_END
+
+// #include 
+
+
+//     __ _____ _____ _____
+//  __|  |   __|     |   | |  JSON for Modern C++
+// |  |  |__   |  |  | | | |  version 3.11.3
+// |_____|_____|_____|_|___|  https://github.com/nlohmann/json
+//
+// SPDX-FileCopyrightText: 2013-2023 Niels Lohmann 
+// SPDX-FileCopyrightText: 2016-2021 Evan Nemerson 
+// SPDX-License-Identifier: MIT
+
+/* Hedley - https://nemequ.github.io/hedley
+ * Created by Evan Nemerson 
+ */
+
+#if !defined(JSON_HEDLEY_VERSION) || (JSON_HEDLEY_VERSION < 15)
+#if defined(JSON_HEDLEY_VERSION)
+    #undef JSON_HEDLEY_VERSION
+#endif
+#define JSON_HEDLEY_VERSION 15
+
+#if defined(JSON_HEDLEY_STRINGIFY_EX)
+    #undef JSON_HEDLEY_STRINGIFY_EX
+#endif
+#define JSON_HEDLEY_STRINGIFY_EX(x) #x
+
+#if defined(JSON_HEDLEY_STRINGIFY)
+    #undef JSON_HEDLEY_STRINGIFY
+#endif
+#define JSON_HEDLEY_STRINGIFY(x) JSON_HEDLEY_STRINGIFY_EX(x)
+
+#if defined(JSON_HEDLEY_CONCAT_EX)
+    #undef JSON_HEDLEY_CONCAT_EX
+#endif
+#define JSON_HEDLEY_CONCAT_EX(a,b) a##b
+
+#if defined(JSON_HEDLEY_CONCAT)
+    #undef JSON_HEDLEY_CONCAT
+#endif
+#define JSON_HEDLEY_CONCAT(a,b) JSON_HEDLEY_CONCAT_EX(a,b)
+
+#if defined(JSON_HEDLEY_CONCAT3_EX)
+    #undef JSON_HEDLEY_CONCAT3_EX
+#endif
+#define JSON_HEDLEY_CONCAT3_EX(a,b,c) a##b##c
+
+#if defined(JSON_HEDLEY_CONCAT3)
+    #undef JSON_HEDLEY_CONCAT3
+#endif
+#define JSON_HEDLEY_CONCAT3(a,b,c) JSON_HEDLEY_CONCAT3_EX(a,b,c)
+
+#if defined(JSON_HEDLEY_VERSION_ENCODE)
+    #undef JSON_HEDLEY_VERSION_ENCODE
+#endif
+#define JSON_HEDLEY_VERSION_ENCODE(major,minor,revision) (((major) * 1000000) + ((minor) * 1000) + (revision))
+
+#if defined(JSON_HEDLEY_VERSION_DECODE_MAJOR)
+    #undef JSON_HEDLEY_VERSION_DECODE_MAJOR
+#endif
+#define JSON_HEDLEY_VERSION_DECODE_MAJOR(version) ((version) / 1000000)
+
+#if defined(JSON_HEDLEY_VERSION_DECODE_MINOR)
+    #undef JSON_HEDLEY_VERSION_DECODE_MINOR
+#endif
+#define JSON_HEDLEY_VERSION_DECODE_MINOR(version) (((version) % 1000000) / 1000)
+
+#if defined(JSON_HEDLEY_VERSION_DECODE_REVISION)
+    #undef JSON_HEDLEY_VERSION_DECODE_REVISION
+#endif
+#define JSON_HEDLEY_VERSION_DECODE_REVISION(version) ((version) % 1000)
+
+#if defined(JSON_HEDLEY_GNUC_VERSION)
+    #undef JSON_HEDLEY_GNUC_VERSION
+#endif
+#if defined(__GNUC__) && defined(__GNUC_PATCHLEVEL__)
+    #define JSON_HEDLEY_GNUC_VERSION JSON_HEDLEY_VERSION_ENCODE(__GNUC__, __GNUC_MINOR__, __GNUC_PATCHLEVEL__)
+#elif defined(__GNUC__)
+    #define JSON_HEDLEY_GNUC_VERSION JSON_HEDLEY_VERSION_ENCODE(__GNUC__, __GNUC_MINOR__, 0)
+#endif
+
+#if defined(JSON_HEDLEY_GNUC_VERSION_CHECK)
+    #undef JSON_HEDLEY_GNUC_VERSION_CHECK
+#endif
+#if defined(JSON_HEDLEY_GNUC_VERSION)
+    #define JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_GNUC_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch))
+#else
+    #define JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) (0)
+#endif
+
+#if defined(JSON_HEDLEY_MSVC_VERSION)
+    #undef JSON_HEDLEY_MSVC_VERSION
+#endif
+#if defined(_MSC_FULL_VER) && (_MSC_FULL_VER >= 140000000) && !defined(__ICL)
+    #define JSON_HEDLEY_MSVC_VERSION JSON_HEDLEY_VERSION_ENCODE(_MSC_FULL_VER / 10000000, (_MSC_FULL_VER % 10000000) / 100000, (_MSC_FULL_VER % 100000) / 100)
+#elif defined(_MSC_FULL_VER) && !defined(__ICL)
+    #define JSON_HEDLEY_MSVC_VERSION JSON_HEDLEY_VERSION_ENCODE(_MSC_FULL_VER / 1000000, (_MSC_FULL_VER % 1000000) / 10000, (_MSC_FULL_VER % 10000) / 10)
+#elif defined(_MSC_VER) && !defined(__ICL)
+    #define JSON_HEDLEY_MSVC_VERSION JSON_HEDLEY_VERSION_ENCODE(_MSC_VER / 100, _MSC_VER % 100, 0)
+#endif
+
+#if defined(JSON_HEDLEY_MSVC_VERSION_CHECK)
+    #undef JSON_HEDLEY_MSVC_VERSION_CHECK
+#endif
+#if !defined(JSON_HEDLEY_MSVC_VERSION)
+    #define JSON_HEDLEY_MSVC_VERSION_CHECK(major,minor,patch) (0)
+#elif defined(_MSC_VER) && (_MSC_VER >= 1400)
+    #define JSON_HEDLEY_MSVC_VERSION_CHECK(major,minor,patch) (_MSC_FULL_VER >= ((major * 10000000) + (minor * 100000) + (patch)))
+#elif defined(_MSC_VER) && (_MSC_VER >= 1200)
+    #define JSON_HEDLEY_MSVC_VERSION_CHECK(major,minor,patch) (_MSC_FULL_VER >= ((major * 1000000) + (minor * 10000) + (patch)))
+#else
+    #define JSON_HEDLEY_MSVC_VERSION_CHECK(major,minor,patch) (_MSC_VER >= ((major * 100) + (minor)))
+#endif
+
+#if defined(JSON_HEDLEY_INTEL_VERSION)
+    #undef JSON_HEDLEY_INTEL_VERSION
+#endif
+#if defined(__INTEL_COMPILER) && defined(__INTEL_COMPILER_UPDATE) && !defined(__ICL)
+    #define JSON_HEDLEY_INTEL_VERSION JSON_HEDLEY_VERSION_ENCODE(__INTEL_COMPILER / 100, __INTEL_COMPILER % 100, __INTEL_COMPILER_UPDATE)
+#elif defined(__INTEL_COMPILER) && !defined(__ICL)
+    #define JSON_HEDLEY_INTEL_VERSION JSON_HEDLEY_VERSION_ENCODE(__INTEL_COMPILER / 100, __INTEL_COMPILER % 100, 0)
+#endif
+
+#if defined(JSON_HEDLEY_INTEL_VERSION_CHECK)
+    #undef JSON_HEDLEY_INTEL_VERSION_CHECK
+#endif
+#if defined(JSON_HEDLEY_INTEL_VERSION)
+    #define JSON_HEDLEY_INTEL_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_INTEL_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch))
+#else
+    #define JSON_HEDLEY_INTEL_VERSION_CHECK(major,minor,patch) (0)
+#endif
+
+#if defined(JSON_HEDLEY_INTEL_CL_VERSION)
+    #undef JSON_HEDLEY_INTEL_CL_VERSION
+#endif
+#if defined(__INTEL_COMPILER) && defined(__INTEL_COMPILER_UPDATE) && defined(__ICL)
+    #define JSON_HEDLEY_INTEL_CL_VERSION JSON_HEDLEY_VERSION_ENCODE(__INTEL_COMPILER, __INTEL_COMPILER_UPDATE, 0)
+#endif
+
+#if defined(JSON_HEDLEY_INTEL_CL_VERSION_CHECK)
+    #undef JSON_HEDLEY_INTEL_CL_VERSION_CHECK
+#endif
+#if defined(JSON_HEDLEY_INTEL_CL_VERSION)
+    #define JSON_HEDLEY_INTEL_CL_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_INTEL_CL_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch))
+#else
+    #define JSON_HEDLEY_INTEL_CL_VERSION_CHECK(major,minor,patch) (0)
+#endif
+
+#if defined(JSON_HEDLEY_PGI_VERSION)
+    #undef JSON_HEDLEY_PGI_VERSION
+#endif
+#if defined(__PGI) && defined(__PGIC__) && defined(__PGIC_MINOR__) && defined(__PGIC_PATCHLEVEL__)
+    #define JSON_HEDLEY_PGI_VERSION JSON_HEDLEY_VERSION_ENCODE(__PGIC__, __PGIC_MINOR__, __PGIC_PATCHLEVEL__)
+#endif
+
+#if defined(JSON_HEDLEY_PGI_VERSION_CHECK)
+    #undef JSON_HEDLEY_PGI_VERSION_CHECK
+#endif
+#if defined(JSON_HEDLEY_PGI_VERSION)
+    #define JSON_HEDLEY_PGI_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_PGI_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch))
+#else
+    #define JSON_HEDLEY_PGI_VERSION_CHECK(major,minor,patch) (0)
+#endif
+
+#if defined(JSON_HEDLEY_SUNPRO_VERSION)
+    #undef JSON_HEDLEY_SUNPRO_VERSION
+#endif
+#if defined(__SUNPRO_C) && (__SUNPRO_C > 0x1000)
+    #define JSON_HEDLEY_SUNPRO_VERSION JSON_HEDLEY_VERSION_ENCODE((((__SUNPRO_C >> 16) & 0xf) * 10) + ((__SUNPRO_C >> 12) & 0xf), (((__SUNPRO_C >> 8) & 0xf) * 10) + ((__SUNPRO_C >> 4) & 0xf), (__SUNPRO_C & 0xf) * 10)
+#elif defined(__SUNPRO_C)
+    #define JSON_HEDLEY_SUNPRO_VERSION JSON_HEDLEY_VERSION_ENCODE((__SUNPRO_C >> 8) & 0xf, (__SUNPRO_C >> 4) & 0xf, (__SUNPRO_C) & 0xf)
+#elif defined(__SUNPRO_CC) && (__SUNPRO_CC > 0x1000)
+    #define JSON_HEDLEY_SUNPRO_VERSION JSON_HEDLEY_VERSION_ENCODE((((__SUNPRO_CC >> 16) & 0xf) * 10) + ((__SUNPRO_CC >> 12) & 0xf), (((__SUNPRO_CC >> 8) & 0xf) * 10) + ((__SUNPRO_CC >> 4) & 0xf), (__SUNPRO_CC & 0xf) * 10)
+#elif defined(__SUNPRO_CC)
+    #define JSON_HEDLEY_SUNPRO_VERSION JSON_HEDLEY_VERSION_ENCODE((__SUNPRO_CC >> 8) & 0xf, (__SUNPRO_CC >> 4) & 0xf, (__SUNPRO_CC) & 0xf)
+#endif
+
+#if defined(JSON_HEDLEY_SUNPRO_VERSION_CHECK)
+    #undef JSON_HEDLEY_SUNPRO_VERSION_CHECK
+#endif
+#if defined(JSON_HEDLEY_SUNPRO_VERSION)
+    #define JSON_HEDLEY_SUNPRO_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_SUNPRO_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch))
+#else
+    #define JSON_HEDLEY_SUNPRO_VERSION_CHECK(major,minor,patch) (0)
+#endif
+
+#if defined(JSON_HEDLEY_EMSCRIPTEN_VERSION)
+    #undef JSON_HEDLEY_EMSCRIPTEN_VERSION
+#endif
+#if defined(__EMSCRIPTEN__)
+    #define JSON_HEDLEY_EMSCRIPTEN_VERSION JSON_HEDLEY_VERSION_ENCODE(__EMSCRIPTEN_major__, __EMSCRIPTEN_minor__, __EMSCRIPTEN_tiny__)
+#endif
+
+#if defined(JSON_HEDLEY_EMSCRIPTEN_VERSION_CHECK)
+    #undef JSON_HEDLEY_EMSCRIPTEN_VERSION_CHECK
+#endif
+#if defined(JSON_HEDLEY_EMSCRIPTEN_VERSION)
+    #define JSON_HEDLEY_EMSCRIPTEN_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_EMSCRIPTEN_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch))
+#else
+    #define JSON_HEDLEY_EMSCRIPTEN_VERSION_CHECK(major,minor,patch) (0)
+#endif
+
+#if defined(JSON_HEDLEY_ARM_VERSION)
+    #undef JSON_HEDLEY_ARM_VERSION
+#endif
+#if defined(__CC_ARM) && defined(__ARMCOMPILER_VERSION)
+    #define JSON_HEDLEY_ARM_VERSION JSON_HEDLEY_VERSION_ENCODE(__ARMCOMPILER_VERSION / 1000000, (__ARMCOMPILER_VERSION % 1000000) / 10000, (__ARMCOMPILER_VERSION % 10000) / 100)
+#elif defined(__CC_ARM) && defined(__ARMCC_VERSION)
+    #define JSON_HEDLEY_ARM_VERSION JSON_HEDLEY_VERSION_ENCODE(__ARMCC_VERSION / 1000000, (__ARMCC_VERSION % 1000000) / 10000, (__ARMCC_VERSION % 10000) / 100)
+#endif
+
+#if defined(JSON_HEDLEY_ARM_VERSION_CHECK)
+    #undef JSON_HEDLEY_ARM_VERSION_CHECK
+#endif
+#if defined(JSON_HEDLEY_ARM_VERSION)
+    #define JSON_HEDLEY_ARM_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_ARM_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch))
+#else
+    #define JSON_HEDLEY_ARM_VERSION_CHECK(major,minor,patch) (0)
+#endif
+
+#if defined(JSON_HEDLEY_IBM_VERSION)
+    #undef JSON_HEDLEY_IBM_VERSION
+#endif
+#if defined(__ibmxl__)
+    #define JSON_HEDLEY_IBM_VERSION JSON_HEDLEY_VERSION_ENCODE(__ibmxl_version__, __ibmxl_release__, __ibmxl_modification__)
+#elif defined(__xlC__) && defined(__xlC_ver__)
+    #define JSON_HEDLEY_IBM_VERSION JSON_HEDLEY_VERSION_ENCODE(__xlC__ >> 8, __xlC__ & 0xff, (__xlC_ver__ >> 8) & 0xff)
+#elif defined(__xlC__)
+    #define JSON_HEDLEY_IBM_VERSION JSON_HEDLEY_VERSION_ENCODE(__xlC__ >> 8, __xlC__ & 0xff, 0)
+#endif
+
+#if defined(JSON_HEDLEY_IBM_VERSION_CHECK)
+    #undef JSON_HEDLEY_IBM_VERSION_CHECK
+#endif
+#if defined(JSON_HEDLEY_IBM_VERSION)
+    #define JSON_HEDLEY_IBM_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_IBM_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch))
+#else
+    #define JSON_HEDLEY_IBM_VERSION_CHECK(major,minor,patch) (0)
+#endif
+
+#if defined(JSON_HEDLEY_TI_VERSION)
+    #undef JSON_HEDLEY_TI_VERSION
+#endif
+#if \
+    defined(__TI_COMPILER_VERSION__) && \
+    ( \
+      defined(__TMS470__) || defined(__TI_ARM__) || \
+      defined(__MSP430__) || \
+      defined(__TMS320C2000__) \
+    )
+#if (__TI_COMPILER_VERSION__ >= 16000000)
+    #define JSON_HEDLEY_TI_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000))
+#endif
+#endif
+
+#if defined(JSON_HEDLEY_TI_VERSION_CHECK)
+    #undef JSON_HEDLEY_TI_VERSION_CHECK
+#endif
+#if defined(JSON_HEDLEY_TI_VERSION)
+    #define JSON_HEDLEY_TI_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch))
+#else
+    #define JSON_HEDLEY_TI_VERSION_CHECK(major,minor,patch) (0)
+#endif
+
+#if defined(JSON_HEDLEY_TI_CL2000_VERSION)
+    #undef JSON_HEDLEY_TI_CL2000_VERSION
+#endif
+#if defined(__TI_COMPILER_VERSION__) && defined(__TMS320C2000__)
+    #define JSON_HEDLEY_TI_CL2000_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000))
+#endif
+
+#if defined(JSON_HEDLEY_TI_CL2000_VERSION_CHECK)
+    #undef JSON_HEDLEY_TI_CL2000_VERSION_CHECK
+#endif
+#if defined(JSON_HEDLEY_TI_CL2000_VERSION)
+    #define JSON_HEDLEY_TI_CL2000_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_CL2000_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch))
+#else
+    #define JSON_HEDLEY_TI_CL2000_VERSION_CHECK(major,minor,patch) (0)
+#endif
+
+#if defined(JSON_HEDLEY_TI_CL430_VERSION)
+    #undef JSON_HEDLEY_TI_CL430_VERSION
+#endif
+#if defined(__TI_COMPILER_VERSION__) && defined(__MSP430__)
+    #define JSON_HEDLEY_TI_CL430_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000))
+#endif
+
+#if defined(JSON_HEDLEY_TI_CL430_VERSION_CHECK)
+    #undef JSON_HEDLEY_TI_CL430_VERSION_CHECK
+#endif
+#if defined(JSON_HEDLEY_TI_CL430_VERSION)
+    #define JSON_HEDLEY_TI_CL430_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_CL430_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch))
+#else
+    #define JSON_HEDLEY_TI_CL430_VERSION_CHECK(major,minor,patch) (0)
+#endif
+
+#if defined(JSON_HEDLEY_TI_ARMCL_VERSION)
+    #undef JSON_HEDLEY_TI_ARMCL_VERSION
+#endif
+#if defined(__TI_COMPILER_VERSION__) && (defined(__TMS470__) || defined(__TI_ARM__))
+    #define JSON_HEDLEY_TI_ARMCL_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000))
+#endif
+
+#if defined(JSON_HEDLEY_TI_ARMCL_VERSION_CHECK)
+    #undef JSON_HEDLEY_TI_ARMCL_VERSION_CHECK
+#endif
+#if defined(JSON_HEDLEY_TI_ARMCL_VERSION)
+    #define JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_ARMCL_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch))
+#else
+    #define JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(major,minor,patch) (0)
+#endif
+
+#if defined(JSON_HEDLEY_TI_CL6X_VERSION)
+    #undef JSON_HEDLEY_TI_CL6X_VERSION
+#endif
+#if defined(__TI_COMPILER_VERSION__) && defined(__TMS320C6X__)
+    #define JSON_HEDLEY_TI_CL6X_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000))
+#endif
+
+#if defined(JSON_HEDLEY_TI_CL6X_VERSION_CHECK)
+    #undef JSON_HEDLEY_TI_CL6X_VERSION_CHECK
+#endif
+#if defined(JSON_HEDLEY_TI_CL6X_VERSION)
+    #define JSON_HEDLEY_TI_CL6X_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_CL6X_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch))
+#else
+    #define JSON_HEDLEY_TI_CL6X_VERSION_CHECK(major,minor,patch) (0)
+#endif
+
+#if defined(JSON_HEDLEY_TI_CL7X_VERSION)
+    #undef JSON_HEDLEY_TI_CL7X_VERSION
+#endif
+#if defined(__TI_COMPILER_VERSION__) && defined(__C7000__)
+    #define JSON_HEDLEY_TI_CL7X_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000))
+#endif
+
+#if defined(JSON_HEDLEY_TI_CL7X_VERSION_CHECK)
+    #undef JSON_HEDLEY_TI_CL7X_VERSION_CHECK
+#endif
+#if defined(JSON_HEDLEY_TI_CL7X_VERSION)
+    #define JSON_HEDLEY_TI_CL7X_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_CL7X_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch))
+#else
+    #define JSON_HEDLEY_TI_CL7X_VERSION_CHECK(major,minor,patch) (0)
+#endif
+
+#if defined(JSON_HEDLEY_TI_CLPRU_VERSION)
+    #undef JSON_HEDLEY_TI_CLPRU_VERSION
+#endif
+#if defined(__TI_COMPILER_VERSION__) && defined(__PRU__)
+    #define JSON_HEDLEY_TI_CLPRU_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000))
+#endif
+
+#if defined(JSON_HEDLEY_TI_CLPRU_VERSION_CHECK)
+    #undef JSON_HEDLEY_TI_CLPRU_VERSION_CHECK
+#endif
+#if defined(JSON_HEDLEY_TI_CLPRU_VERSION)
+    #define JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_CLPRU_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch))
+#else
+    #define JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(major,minor,patch) (0)
+#endif
+
+#if defined(JSON_HEDLEY_CRAY_VERSION)
+    #undef JSON_HEDLEY_CRAY_VERSION
+#endif
+#if defined(_CRAYC)
+    #if defined(_RELEASE_PATCHLEVEL)
+        #define JSON_HEDLEY_CRAY_VERSION JSON_HEDLEY_VERSION_ENCODE(_RELEASE_MAJOR, _RELEASE_MINOR, _RELEASE_PATCHLEVEL)
+    #else
+        #define JSON_HEDLEY_CRAY_VERSION JSON_HEDLEY_VERSION_ENCODE(_RELEASE_MAJOR, _RELEASE_MINOR, 0)
+    #endif
+#endif
+
+#if defined(JSON_HEDLEY_CRAY_VERSION_CHECK)
+    #undef JSON_HEDLEY_CRAY_VERSION_CHECK
+#endif
+#if defined(JSON_HEDLEY_CRAY_VERSION)
+    #define JSON_HEDLEY_CRAY_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_CRAY_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch))
+#else
+    #define JSON_HEDLEY_CRAY_VERSION_CHECK(major,minor,patch) (0)
+#endif
+
+#if defined(JSON_HEDLEY_IAR_VERSION)
+    #undef JSON_HEDLEY_IAR_VERSION
+#endif
+#if defined(__IAR_SYSTEMS_ICC__)
+    #if __VER__ > 1000
+        #define JSON_HEDLEY_IAR_VERSION JSON_HEDLEY_VERSION_ENCODE((__VER__ / 1000000), ((__VER__ / 1000) % 1000), (__VER__ % 1000))
+    #else
+        #define JSON_HEDLEY_IAR_VERSION JSON_HEDLEY_VERSION_ENCODE(__VER__ / 100, __VER__ % 100, 0)
+    #endif
+#endif
+
+#if defined(JSON_HEDLEY_IAR_VERSION_CHECK)
+    #undef JSON_HEDLEY_IAR_VERSION_CHECK
+#endif
+#if defined(JSON_HEDLEY_IAR_VERSION)
+    #define JSON_HEDLEY_IAR_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_IAR_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch))
+#else
+    #define JSON_HEDLEY_IAR_VERSION_CHECK(major,minor,patch) (0)
+#endif
+
+#if defined(JSON_HEDLEY_TINYC_VERSION)
+    #undef JSON_HEDLEY_TINYC_VERSION
+#endif
+#if defined(__TINYC__)
+    #define JSON_HEDLEY_TINYC_VERSION JSON_HEDLEY_VERSION_ENCODE(__TINYC__ / 1000, (__TINYC__ / 100) % 10, __TINYC__ % 100)
+#endif
+
+#if defined(JSON_HEDLEY_TINYC_VERSION_CHECK)
+    #undef JSON_HEDLEY_TINYC_VERSION_CHECK
+#endif
+#if defined(JSON_HEDLEY_TINYC_VERSION)
+    #define JSON_HEDLEY_TINYC_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TINYC_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch))
+#else
+    #define JSON_HEDLEY_TINYC_VERSION_CHECK(major,minor,patch) (0)
+#endif
+
+#if defined(JSON_HEDLEY_DMC_VERSION)
+    #undef JSON_HEDLEY_DMC_VERSION
+#endif
+#if defined(__DMC__)
+    #define JSON_HEDLEY_DMC_VERSION JSON_HEDLEY_VERSION_ENCODE(__DMC__ >> 8, (__DMC__ >> 4) & 0xf, __DMC__ & 0xf)
+#endif
+
+#if defined(JSON_HEDLEY_DMC_VERSION_CHECK)
+    #undef JSON_HEDLEY_DMC_VERSION_CHECK
+#endif
+#if defined(JSON_HEDLEY_DMC_VERSION)
+    #define JSON_HEDLEY_DMC_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_DMC_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch))
+#else
+    #define JSON_HEDLEY_DMC_VERSION_CHECK(major,minor,patch) (0)
+#endif
+
+#if defined(JSON_HEDLEY_COMPCERT_VERSION)
+    #undef JSON_HEDLEY_COMPCERT_VERSION
+#endif
+#if defined(__COMPCERT_VERSION__)
+    #define JSON_HEDLEY_COMPCERT_VERSION JSON_HEDLEY_VERSION_ENCODE(__COMPCERT_VERSION__ / 10000, (__COMPCERT_VERSION__ / 100) % 100, __COMPCERT_VERSION__ % 100)
+#endif
+
+#if defined(JSON_HEDLEY_COMPCERT_VERSION_CHECK)
+    #undef JSON_HEDLEY_COMPCERT_VERSION_CHECK
+#endif
+#if defined(JSON_HEDLEY_COMPCERT_VERSION)
+    #define JSON_HEDLEY_COMPCERT_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_COMPCERT_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch))
+#else
+    #define JSON_HEDLEY_COMPCERT_VERSION_CHECK(major,minor,patch) (0)
+#endif
+
+#if defined(JSON_HEDLEY_PELLES_VERSION)
+    #undef JSON_HEDLEY_PELLES_VERSION
+#endif
+#if defined(__POCC__)
+    #define JSON_HEDLEY_PELLES_VERSION JSON_HEDLEY_VERSION_ENCODE(__POCC__ / 100, __POCC__ % 100, 0)
+#endif
+
+#if defined(JSON_HEDLEY_PELLES_VERSION_CHECK)
+    #undef JSON_HEDLEY_PELLES_VERSION_CHECK
+#endif
+#if defined(JSON_HEDLEY_PELLES_VERSION)
+    #define JSON_HEDLEY_PELLES_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_PELLES_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch))
+#else
+    #define JSON_HEDLEY_PELLES_VERSION_CHECK(major,minor,patch) (0)
+#endif
+
+#if defined(JSON_HEDLEY_MCST_LCC_VERSION)
+    #undef JSON_HEDLEY_MCST_LCC_VERSION
+#endif
+#if defined(__LCC__) && defined(__LCC_MINOR__)
+    #define JSON_HEDLEY_MCST_LCC_VERSION JSON_HEDLEY_VERSION_ENCODE(__LCC__ / 100, __LCC__ % 100, __LCC_MINOR__)
+#endif
+
+#if defined(JSON_HEDLEY_MCST_LCC_VERSION_CHECK)
+    #undef JSON_HEDLEY_MCST_LCC_VERSION_CHECK
+#endif
+#if defined(JSON_HEDLEY_MCST_LCC_VERSION)
+    #define JSON_HEDLEY_MCST_LCC_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_MCST_LCC_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch))
+#else
+    #define JSON_HEDLEY_MCST_LCC_VERSION_CHECK(major,minor,patch) (0)
+#endif
+
+#if defined(JSON_HEDLEY_GCC_VERSION)
+    #undef JSON_HEDLEY_GCC_VERSION
+#endif
+#if \
+    defined(JSON_HEDLEY_GNUC_VERSION) && \
+    !defined(__clang__) && \
+    !defined(JSON_HEDLEY_INTEL_VERSION) && \
+    !defined(JSON_HEDLEY_PGI_VERSION) && \
+    !defined(JSON_HEDLEY_ARM_VERSION) && \
+    !defined(JSON_HEDLEY_CRAY_VERSION) && \
+    !defined(JSON_HEDLEY_TI_VERSION) && \
+    !defined(JSON_HEDLEY_TI_ARMCL_VERSION) && \
+    !defined(JSON_HEDLEY_TI_CL430_VERSION) && \
+    !defined(JSON_HEDLEY_TI_CL2000_VERSION) && \
+    !defined(JSON_HEDLEY_TI_CL6X_VERSION) && \
+    !defined(JSON_HEDLEY_TI_CL7X_VERSION) && \
+    !defined(JSON_HEDLEY_TI_CLPRU_VERSION) && \
+    !defined(__COMPCERT__) && \
+    !defined(JSON_HEDLEY_MCST_LCC_VERSION)
+    #define JSON_HEDLEY_GCC_VERSION JSON_HEDLEY_GNUC_VERSION
+#endif
+
+#if defined(JSON_HEDLEY_GCC_VERSION_CHECK)
+    #undef JSON_HEDLEY_GCC_VERSION_CHECK
+#endif
+#if defined(JSON_HEDLEY_GCC_VERSION)
+    #define JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_GCC_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch))
+#else
+    #define JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) (0)
+#endif
+
+#if defined(JSON_HEDLEY_HAS_ATTRIBUTE)
+    #undef JSON_HEDLEY_HAS_ATTRIBUTE
+#endif
+#if \
+  defined(__has_attribute) && \
+  ( \
+    (!defined(JSON_HEDLEY_IAR_VERSION) || JSON_HEDLEY_IAR_VERSION_CHECK(8,5,9)) \
+  )
+#  define JSON_HEDLEY_HAS_ATTRIBUTE(attribute) __has_attribute(attribute)
+#else
+#  define JSON_HEDLEY_HAS_ATTRIBUTE(attribute) (0)
+#endif
+
+#if defined(JSON_HEDLEY_GNUC_HAS_ATTRIBUTE)
+    #undef JSON_HEDLEY_GNUC_HAS_ATTRIBUTE
+#endif
+#if defined(__has_attribute)
+    #define JSON_HEDLEY_GNUC_HAS_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_HAS_ATTRIBUTE(attribute)
+#else
+    #define JSON_HEDLEY_GNUC_HAS_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch)
+#endif
+
+#if defined(JSON_HEDLEY_GCC_HAS_ATTRIBUTE)
+    #undef JSON_HEDLEY_GCC_HAS_ATTRIBUTE
+#endif
+#if defined(__has_attribute)
+    #define JSON_HEDLEY_GCC_HAS_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_HAS_ATTRIBUTE(attribute)
+#else
+    #define JSON_HEDLEY_GCC_HAS_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch)
+#endif
+
+#if defined(JSON_HEDLEY_HAS_CPP_ATTRIBUTE)
+    #undef JSON_HEDLEY_HAS_CPP_ATTRIBUTE
+#endif
+#if \
+    defined(__has_cpp_attribute) && \
+    defined(__cplusplus) && \
+    (!defined(JSON_HEDLEY_SUNPRO_VERSION) || JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,15,0))
+    #define JSON_HEDLEY_HAS_CPP_ATTRIBUTE(attribute) __has_cpp_attribute(attribute)
+#else
+    #define JSON_HEDLEY_HAS_CPP_ATTRIBUTE(attribute) (0)
+#endif
+
+#if defined(JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS)
+    #undef JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS
+#endif
+#if !defined(__cplusplus) || !defined(__has_cpp_attribute)
+    #define JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS(ns,attribute) (0)
+#elif \
+    !defined(JSON_HEDLEY_PGI_VERSION) && \
+    !defined(JSON_HEDLEY_IAR_VERSION) && \
+    (!defined(JSON_HEDLEY_SUNPRO_VERSION) || JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,15,0)) && \
+    (!defined(JSON_HEDLEY_MSVC_VERSION) || JSON_HEDLEY_MSVC_VERSION_CHECK(19,20,0))
+    #define JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS(ns,attribute) JSON_HEDLEY_HAS_CPP_ATTRIBUTE(ns::attribute)
+#else
+    #define JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS(ns,attribute) (0)
+#endif
+
+#if defined(JSON_HEDLEY_GNUC_HAS_CPP_ATTRIBUTE)
+    #undef JSON_HEDLEY_GNUC_HAS_CPP_ATTRIBUTE
+#endif
+#if defined(__has_cpp_attribute) && defined(__cplusplus)
+    #define JSON_HEDLEY_GNUC_HAS_CPP_ATTRIBUTE(attribute,major,minor,patch) __has_cpp_attribute(attribute)
+#else
+    #define JSON_HEDLEY_GNUC_HAS_CPP_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch)
+#endif
+
+#if defined(JSON_HEDLEY_GCC_HAS_CPP_ATTRIBUTE)
+    #undef JSON_HEDLEY_GCC_HAS_CPP_ATTRIBUTE
+#endif
+#if defined(__has_cpp_attribute) && defined(__cplusplus)
+    #define JSON_HEDLEY_GCC_HAS_CPP_ATTRIBUTE(attribute,major,minor,patch) __has_cpp_attribute(attribute)
+#else
+    #define JSON_HEDLEY_GCC_HAS_CPP_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch)
+#endif
+
+#if defined(JSON_HEDLEY_HAS_BUILTIN)
+    #undef JSON_HEDLEY_HAS_BUILTIN
+#endif
+#if defined(__has_builtin)
+    #define JSON_HEDLEY_HAS_BUILTIN(builtin) __has_builtin(builtin)
+#else
+    #define JSON_HEDLEY_HAS_BUILTIN(builtin) (0)
+#endif
+
+#if defined(JSON_HEDLEY_GNUC_HAS_BUILTIN)
+    #undef JSON_HEDLEY_GNUC_HAS_BUILTIN
+#endif
+#if defined(__has_builtin)
+    #define JSON_HEDLEY_GNUC_HAS_BUILTIN(builtin,major,minor,patch) __has_builtin(builtin)
+#else
+    #define JSON_HEDLEY_GNUC_HAS_BUILTIN(builtin,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch)
+#endif
+
+#if defined(JSON_HEDLEY_GCC_HAS_BUILTIN)
+    #undef JSON_HEDLEY_GCC_HAS_BUILTIN
+#endif
+#if defined(__has_builtin)
+    #define JSON_HEDLEY_GCC_HAS_BUILTIN(builtin,major,minor,patch) __has_builtin(builtin)
+#else
+    #define JSON_HEDLEY_GCC_HAS_BUILTIN(builtin,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch)
+#endif
+
+#if defined(JSON_HEDLEY_HAS_FEATURE)
+    #undef JSON_HEDLEY_HAS_FEATURE
+#endif
+#if defined(__has_feature)
+    #define JSON_HEDLEY_HAS_FEATURE(feature) __has_feature(feature)
+#else
+    #define JSON_HEDLEY_HAS_FEATURE(feature) (0)
+#endif
+
+#if defined(JSON_HEDLEY_GNUC_HAS_FEATURE)
+    #undef JSON_HEDLEY_GNUC_HAS_FEATURE
+#endif
+#if defined(__has_feature)
+    #define JSON_HEDLEY_GNUC_HAS_FEATURE(feature,major,minor,patch) __has_feature(feature)
+#else
+    #define JSON_HEDLEY_GNUC_HAS_FEATURE(feature,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch)
+#endif
+
+#if defined(JSON_HEDLEY_GCC_HAS_FEATURE)
+    #undef JSON_HEDLEY_GCC_HAS_FEATURE
+#endif
+#if defined(__has_feature)
+    #define JSON_HEDLEY_GCC_HAS_FEATURE(feature,major,minor,patch) __has_feature(feature)
+#else
+    #define JSON_HEDLEY_GCC_HAS_FEATURE(feature,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch)
+#endif
+
+#if defined(JSON_HEDLEY_HAS_EXTENSION)
+    #undef JSON_HEDLEY_HAS_EXTENSION
+#endif
+#if defined(__has_extension)
+    #define JSON_HEDLEY_HAS_EXTENSION(extension) __has_extension(extension)
+#else
+    #define JSON_HEDLEY_HAS_EXTENSION(extension) (0)
+#endif
+
+#if defined(JSON_HEDLEY_GNUC_HAS_EXTENSION)
+    #undef JSON_HEDLEY_GNUC_HAS_EXTENSION
+#endif
+#if defined(__has_extension)
+    #define JSON_HEDLEY_GNUC_HAS_EXTENSION(extension,major,minor,patch) __has_extension(extension)
+#else
+    #define JSON_HEDLEY_GNUC_HAS_EXTENSION(extension,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch)
+#endif
+
+#if defined(JSON_HEDLEY_GCC_HAS_EXTENSION)
+    #undef JSON_HEDLEY_GCC_HAS_EXTENSION
+#endif
+#if defined(__has_extension)
+    #define JSON_HEDLEY_GCC_HAS_EXTENSION(extension,major,minor,patch) __has_extension(extension)
+#else
+    #define JSON_HEDLEY_GCC_HAS_EXTENSION(extension,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch)
+#endif
+
+#if defined(JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE)
+    #undef JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE
+#endif
+#if defined(__has_declspec_attribute)
+    #define JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE(attribute) __has_declspec_attribute(attribute)
+#else
+    #define JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE(attribute) (0)
+#endif
+
+#if defined(JSON_HEDLEY_GNUC_HAS_DECLSPEC_ATTRIBUTE)
+    #undef JSON_HEDLEY_GNUC_HAS_DECLSPEC_ATTRIBUTE
+#endif
+#if defined(__has_declspec_attribute)
+    #define JSON_HEDLEY_GNUC_HAS_DECLSPEC_ATTRIBUTE(attribute,major,minor,patch) __has_declspec_attribute(attribute)
+#else
+    #define JSON_HEDLEY_GNUC_HAS_DECLSPEC_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch)
+#endif
+
+#if defined(JSON_HEDLEY_GCC_HAS_DECLSPEC_ATTRIBUTE)
+    #undef JSON_HEDLEY_GCC_HAS_DECLSPEC_ATTRIBUTE
+#endif
+#if defined(__has_declspec_attribute)
+    #define JSON_HEDLEY_GCC_HAS_DECLSPEC_ATTRIBUTE(attribute,major,minor,patch) __has_declspec_attribute(attribute)
+#else
+    #define JSON_HEDLEY_GCC_HAS_DECLSPEC_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch)
+#endif
+
+#if defined(JSON_HEDLEY_HAS_WARNING)
+    #undef JSON_HEDLEY_HAS_WARNING
+#endif
+#if defined(__has_warning)
+    #define JSON_HEDLEY_HAS_WARNING(warning) __has_warning(warning)
+#else
+    #define JSON_HEDLEY_HAS_WARNING(warning) (0)
+#endif
+
+#if defined(JSON_HEDLEY_GNUC_HAS_WARNING)
+    #undef JSON_HEDLEY_GNUC_HAS_WARNING
+#endif
+#if defined(__has_warning)
+    #define JSON_HEDLEY_GNUC_HAS_WARNING(warning,major,minor,patch) __has_warning(warning)
+#else
+    #define JSON_HEDLEY_GNUC_HAS_WARNING(warning,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch)
+#endif
+
+#if defined(JSON_HEDLEY_GCC_HAS_WARNING)
+    #undef JSON_HEDLEY_GCC_HAS_WARNING
+#endif
+#if defined(__has_warning)
+    #define JSON_HEDLEY_GCC_HAS_WARNING(warning,major,minor,patch) __has_warning(warning)
+#else
+    #define JSON_HEDLEY_GCC_HAS_WARNING(warning,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch)
+#endif
+
+#if \
+    (defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L)) || \
+    defined(__clang__) || \
+    JSON_HEDLEY_GCC_VERSION_CHECK(3,0,0) || \
+    JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \
+    JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) || \
+    JSON_HEDLEY_PGI_VERSION_CHECK(18,4,0) || \
+    JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \
+    JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \
+    JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,7,0) || \
+    JSON_HEDLEY_TI_CL430_VERSION_CHECK(2,0,1) || \
+    JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,1,0) || \
+    JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,0,0) || \
+    JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \
+    JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \
+    JSON_HEDLEY_CRAY_VERSION_CHECK(5,0,0) || \
+    JSON_HEDLEY_TINYC_VERSION_CHECK(0,9,17) || \
+    JSON_HEDLEY_SUNPRO_VERSION_CHECK(8,0,0) || \
+    (JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) && defined(__C99_PRAGMA_OPERATOR))
+    #define JSON_HEDLEY_PRAGMA(value) _Pragma(#value)
+#elif JSON_HEDLEY_MSVC_VERSION_CHECK(15,0,0)
+    #define JSON_HEDLEY_PRAGMA(value) __pragma(value)
+#else
+    #define JSON_HEDLEY_PRAGMA(value)
+#endif
+
+#if defined(JSON_HEDLEY_DIAGNOSTIC_PUSH)
+    #undef JSON_HEDLEY_DIAGNOSTIC_PUSH
+#endif
+#if defined(JSON_HEDLEY_DIAGNOSTIC_POP)
+    #undef JSON_HEDLEY_DIAGNOSTIC_POP
+#endif
+#if defined(__clang__)
+    #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("clang diagnostic push")
+    #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("clang diagnostic pop")
+#elif JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("warning(push)")
+    #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("warning(pop)")
+#elif JSON_HEDLEY_GCC_VERSION_CHECK(4,6,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("GCC diagnostic push")
+    #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("GCC diagnostic pop")
+#elif \
+    JSON_HEDLEY_MSVC_VERSION_CHECK(15,0,0) || \
+    JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_PUSH __pragma(warning(push))
+    #define JSON_HEDLEY_DIAGNOSTIC_POP __pragma(warning(pop))
+#elif JSON_HEDLEY_ARM_VERSION_CHECK(5,6,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("push")
+    #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("pop")
+#elif \
+    JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \
+    JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \
+    JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,4,0) || \
+    JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,1,0) || \
+    JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \
+    JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("diag_push")
+    #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("diag_pop")
+#elif JSON_HEDLEY_PELLES_VERSION_CHECK(2,90,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("warning(push)")
+    #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("warning(pop)")
+#else
+    #define JSON_HEDLEY_DIAGNOSTIC_PUSH
+    #define JSON_HEDLEY_DIAGNOSTIC_POP
+#endif
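+
+/* Illustrative sketch (not part of the upstream Hedley header): the PUSH/POP
+   pair brackets a region in which one of the JSON_HEDLEY_DIAGNOSTIC_DISABLE_*
+   macros defined below suppresses a specific warning, e.g.
+
+       JSON_HEDLEY_DIAGNOSTIC_PUSH
+       JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED
+       legacy_call();                    // hypothetical deprecated function
+       JSON_HEDLEY_DIAGNOSTIC_POP
+
+   On compilers without push/pop support both macros expand to nothing. */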
+
+/* JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_ is for
+   HEDLEY INTERNAL USE ONLY.  API subject to change without notice. */
+#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_)
+    #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_
+#endif
+#if defined(__cplusplus)
+#  if JSON_HEDLEY_HAS_WARNING("-Wc++98-compat")
+#    if JSON_HEDLEY_HAS_WARNING("-Wc++17-extensions")
+#      if JSON_HEDLEY_HAS_WARNING("-Wc++1z-extensions")
+#        define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(xpr) \
+    JSON_HEDLEY_DIAGNOSTIC_PUSH \
+    _Pragma("clang diagnostic ignored \"-Wc++98-compat\"") \
+    _Pragma("clang diagnostic ignored \"-Wc++17-extensions\"") \
+    _Pragma("clang diagnostic ignored \"-Wc++1z-extensions\"") \
+    xpr \
+    JSON_HEDLEY_DIAGNOSTIC_POP
+#      else
+#        define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(xpr) \
+    JSON_HEDLEY_DIAGNOSTIC_PUSH \
+    _Pragma("clang diagnostic ignored \"-Wc++98-compat\"") \
+    _Pragma("clang diagnostic ignored \"-Wc++17-extensions\"") \
+    xpr \
+    JSON_HEDLEY_DIAGNOSTIC_POP
+#      endif
+#    else
+#      define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(xpr) \
+    JSON_HEDLEY_DIAGNOSTIC_PUSH \
+    _Pragma("clang diagnostic ignored \"-Wc++98-compat\"") \
+    xpr \
+    JSON_HEDLEY_DIAGNOSTIC_POP
+#    endif
+#  endif
+#endif
+#if !defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(x) x
+#endif
+
+#if defined(JSON_HEDLEY_CONST_CAST)
+    #undef JSON_HEDLEY_CONST_CAST
+#endif
+#if defined(__cplusplus)
+#  define JSON_HEDLEY_CONST_CAST(T, expr) (const_cast<T>(expr))
+#elif \
+  JSON_HEDLEY_HAS_WARNING("-Wcast-qual") || \
+  JSON_HEDLEY_GCC_VERSION_CHECK(4,6,0) || \
+  JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0)
+#  define JSON_HEDLEY_CONST_CAST(T, expr) (__extension__ ({ \
+        JSON_HEDLEY_DIAGNOSTIC_PUSH \
+        JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL \
+        ((T) (expr)); \
+        JSON_HEDLEY_DIAGNOSTIC_POP \
+    }))
+#else
+#  define JSON_HEDLEY_CONST_CAST(T, expr) ((T) (expr))
+#endif
+
+#if defined(JSON_HEDLEY_REINTERPRET_CAST)
+    #undef JSON_HEDLEY_REINTERPRET_CAST
+#endif
+#if defined(__cplusplus)
+    #define JSON_HEDLEY_REINTERPRET_CAST(T, expr) (reinterpret_cast<T>(expr))
+#else
+    #define JSON_HEDLEY_REINTERPRET_CAST(T, expr) ((T) (expr))
+#endif
+
+#if defined(JSON_HEDLEY_STATIC_CAST)
+    #undef JSON_HEDLEY_STATIC_CAST
+#endif
+#if defined(__cplusplus)
+    #define JSON_HEDLEY_STATIC_CAST(T, expr) (static_cast<T>(expr))
+#else
+    #define JSON_HEDLEY_STATIC_CAST(T, expr) ((T) (expr))
+#endif
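+
+/* Illustrative sketch (not part of the upstream Hedley header): the cast
+   helpers use the corresponding C++ cast when compiling as C++ and fall back
+   to a plain C cast otherwise, suppressing -Wcast-qual where possible, e.g.
+
+       char *p = JSON_HEDLEY_CONST_CAST(char*, ro_str);   // ro_str is a hypothetical const char*
+*/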
+
+#if defined(JSON_HEDLEY_CPP_CAST)
+    #undef JSON_HEDLEY_CPP_CAST
+#endif
+#if defined(__cplusplus)
+#  if JSON_HEDLEY_HAS_WARNING("-Wold-style-cast")
+#    define JSON_HEDLEY_CPP_CAST(T, expr) \
+    JSON_HEDLEY_DIAGNOSTIC_PUSH \
+    _Pragma("clang diagnostic ignored \"-Wold-style-cast\"") \
+    ((T) (expr)) \
+    JSON_HEDLEY_DIAGNOSTIC_POP
+#  elif JSON_HEDLEY_IAR_VERSION_CHECK(8,3,0)
+#    define JSON_HEDLEY_CPP_CAST(T, expr) \
+    JSON_HEDLEY_DIAGNOSTIC_PUSH \
+    _Pragma("diag_suppress=Pe137") \
+    JSON_HEDLEY_DIAGNOSTIC_POP
+#  else
+#    define JSON_HEDLEY_CPP_CAST(T, expr) ((T) (expr))
+#  endif
+#else
+#  define JSON_HEDLEY_CPP_CAST(T, expr) (expr)
+#endif
+
+#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED)
+    #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED
+#endif
+#if JSON_HEDLEY_HAS_WARNING("-Wdeprecated-declarations")
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("clang diagnostic ignored \"-Wdeprecated-declarations\"")
+#elif JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("warning(disable:1478 1786)")
+#elif JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED __pragma(warning(disable:1478 1786))
+#elif JSON_HEDLEY_PGI_VERSION_CHECK(20,7,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("diag_suppress 1215,1216,1444,1445")
+#elif JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("diag_suppress 1215,1444")
+#elif JSON_HEDLEY_GCC_VERSION_CHECK(4,3,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"")
+#elif JSON_HEDLEY_MSVC_VERSION_CHECK(15,0,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED __pragma(warning(disable:4996))
+#elif JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("diag_suppress 1215,1444")
+#elif \
+    JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \
+    (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \
+    (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \
+    (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \
+    (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \
+    JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \
+    JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("diag_suppress 1291,1718")
+#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,13,0) && !defined(__cplusplus)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("error_messages(off,E_DEPRECATED_ATT,E_DEPRECATED_ATT_MESS)")
+#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,13,0) && defined(__cplusplus)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("error_messages(off,symdeprecated,symdeprecated2)")
+#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("diag_suppress=Pe1444,Pe1215")
+#elif JSON_HEDLEY_PELLES_VERSION_CHECK(2,90,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("warn(disable:2241)")
+#else
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED
+#endif
+
+#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS)
+    #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS
+#endif
+#if JSON_HEDLEY_HAS_WARNING("-Wunknown-pragmas")
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("clang diagnostic ignored \"-Wunknown-pragmas\"")
+#elif JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("warning(disable:161)")
+#elif JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS __pragma(warning(disable:161))
+#elif JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("diag_suppress 1675")
+#elif JSON_HEDLEY_GCC_VERSION_CHECK(4,3,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("GCC diagnostic ignored \"-Wunknown-pragmas\"")
+#elif JSON_HEDLEY_MSVC_VERSION_CHECK(15,0,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS __pragma(warning(disable:4068))
+#elif \
+    JSON_HEDLEY_TI_VERSION_CHECK(16,9,0) || \
+    JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,0,0) || \
+    JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \
+    JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,3,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("diag_suppress 163")
+#elif JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,0,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("diag_suppress 163")
+#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("diag_suppress=Pe161")
+#elif JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("diag_suppress 161")
+#else
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS
+#endif
+
+#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES)
+    #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES
+#endif
+#if JSON_HEDLEY_HAS_WARNING("-Wunknown-attributes")
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("clang diagnostic ignored \"-Wunknown-attributes\"")
+#elif JSON_HEDLEY_GCC_VERSION_CHECK(4,6,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"")
+#elif JSON_HEDLEY_INTEL_VERSION_CHECK(17,0,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("warning(disable:1292)")
+#elif JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES __pragma(warning(disable:1292))
+#elif JSON_HEDLEY_MSVC_VERSION_CHECK(19,0,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES __pragma(warning(disable:5030))
+#elif JSON_HEDLEY_PGI_VERSION_CHECK(20,7,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("diag_suppress 1097,1098")
+#elif JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("diag_suppress 1097")
+#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,14,0) && defined(__cplusplus)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("error_messages(off,attrskipunsup)")
+#elif \
+    JSON_HEDLEY_TI_VERSION_CHECK(18,1,0) || \
+    JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,3,0) || \
+    JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("diag_suppress 1173")
+#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("diag_suppress=Pe1097")
+#elif JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("diag_suppress 1097")
+#else
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES
+#endif
+
+#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL)
+    #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL
+#endif
+#if JSON_HEDLEY_HAS_WARNING("-Wcast-qual")
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL _Pragma("clang diagnostic ignored \"-Wcast-qual\"")
+#elif JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL _Pragma("warning(disable:2203 2331)")
+#elif JSON_HEDLEY_GCC_VERSION_CHECK(3,0,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL _Pragma("GCC diagnostic ignored \"-Wcast-qual\"")
+#else
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL
+#endif
+
+#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION)
+    #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION
+#endif
+#if JSON_HEDLEY_HAS_WARNING("-Wunused-function")
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION _Pragma("clang diagnostic ignored \"-Wunused-function\"")
+#elif JSON_HEDLEY_GCC_VERSION_CHECK(3,4,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION _Pragma("GCC diagnostic ignored \"-Wunused-function\"")
+#elif JSON_HEDLEY_MSVC_VERSION_CHECK(1,0,0)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION __pragma(warning(disable:4505))
+#elif JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10)
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION _Pragma("diag_suppress 3142")
+#else
+    #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION
+#endif
+
+#if defined(JSON_HEDLEY_DEPRECATED)
+    #undef JSON_HEDLEY_DEPRECATED
+#endif
+#if defined(JSON_HEDLEY_DEPRECATED_FOR)
+    #undef JSON_HEDLEY_DEPRECATED_FOR
+#endif
+#if \
+    JSON_HEDLEY_MSVC_VERSION_CHECK(14,0,0) || \
+    JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0)
+    #define JSON_HEDLEY_DEPRECATED(since) __declspec(deprecated("Since " # since))
+    #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) __declspec(deprecated("Since " #since "; use " #replacement))
+#elif \
+    (JSON_HEDLEY_HAS_EXTENSION(attribute_deprecated_with_message) && !defined(JSON_HEDLEY_IAR_VERSION)) || \
+    JSON_HEDLEY_GCC_VERSION_CHECK(4,5,0) || \
+    JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \
+    JSON_HEDLEY_ARM_VERSION_CHECK(5,6,0) || \
+    JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,13,0) || \
+    JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) || \
+    JSON_HEDLEY_TI_VERSION_CHECK(18,1,0) || \
+    JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(18,1,0) || \
+    JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,3,0) || \
+    JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \
+    JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,3,0) || \
+    JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10)
+    #define JSON_HEDLEY_DEPRECATED(since) __attribute__((__deprecated__("Since " #since)))
+    #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) __attribute__((__deprecated__("Since " #since "; use " #replacement)))
+#elif defined(__cplusplus) && (__cplusplus >= 201402L)
+    #define JSON_HEDLEY_DEPRECATED(since) JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[deprecated("Since " #since)]])
+    #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[deprecated("Since " #since "; use " #replacement)]])
+#elif \
+    JSON_HEDLEY_HAS_ATTRIBUTE(deprecated) || \
+    JSON_HEDLEY_GCC_VERSION_CHECK(3,1,0) || \
+    JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \
+    JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \
+    (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \
+    (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \
+    (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \
+    (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \
+    JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \
+    JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \
+    JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) || \
+    JSON_HEDLEY_IAR_VERSION_CHECK(8,10,0)
+    #define JSON_HEDLEY_DEPRECATED(since) __attribute__((__deprecated__))
+    #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) __attribute__((__deprecated__))
+#elif \
+    JSON_HEDLEY_MSVC_VERSION_CHECK(13,10,0) || \
+    JSON_HEDLEY_PELLES_VERSION_CHECK(6,50,0) || \
+    JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0)
+    #define JSON_HEDLEY_DEPRECATED(since) __declspec(deprecated)
+    #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) __declspec(deprecated)
+#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0)
+    #define JSON_HEDLEY_DEPRECATED(since) _Pragma("deprecated")
+    #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) _Pragma("deprecated")
+#else
+    #define JSON_HEDLEY_DEPRECATED(since)
+    #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement)
+#endif
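+
+/* Illustrative sketch (not part of the upstream Hedley header): annotating a
+   declaration emits a compiler-specific deprecation warning at call sites
+   where supported and expands to nothing elsewhere, e.g.
+
+       JSON_HEDLEY_DEPRECATED_FOR(3.0, parse_v2)
+       int parse_v1(const char *s);   // hypothetical API
+*/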
+
+#if defined(JSON_HEDLEY_UNAVAILABLE)
+    #undef JSON_HEDLEY_UNAVAILABLE
+#endif
+#if \
+    JSON_HEDLEY_HAS_ATTRIBUTE(warning) || \
+    JSON_HEDLEY_GCC_VERSION_CHECK(4,3,0) || \
+    JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \
+    JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10)
+    #define JSON_HEDLEY_UNAVAILABLE(available_since) __attribute__((__warning__("Not available until " #available_since)))
+#else
+    #define JSON_HEDLEY_UNAVAILABLE(available_since)
+#endif
+
+#if defined(JSON_HEDLEY_WARN_UNUSED_RESULT)
+    #undef JSON_HEDLEY_WARN_UNUSED_RESULT
+#endif
+#if defined(JSON_HEDLEY_WARN_UNUSED_RESULT_MSG)
+    #undef JSON_HEDLEY_WARN_UNUSED_RESULT_MSG
+#endif
+#if \
+    JSON_HEDLEY_HAS_ATTRIBUTE(warn_unused_result) || \
+    JSON_HEDLEY_GCC_VERSION_CHECK(3,4,0) || \
+    JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \
+    JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \
+    (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \
+    (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \
+    (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \
+    (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \
+    JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \
+    JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \
+    (JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,15,0) && defined(__cplusplus)) || \
+    JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) || \
+    JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10)
+    #define JSON_HEDLEY_WARN_UNUSED_RESULT __attribute__((__warn_unused_result__))
+    #define JSON_HEDLEY_WARN_UNUSED_RESULT_MSG(msg) __attribute__((__warn_unused_result__))
+#elif (JSON_HEDLEY_HAS_CPP_ATTRIBUTE(nodiscard) >= 201907L)
+    #define JSON_HEDLEY_WARN_UNUSED_RESULT JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[nodiscard]])
+    #define JSON_HEDLEY_WARN_UNUSED_RESULT_MSG(msg) JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[nodiscard(msg)]])
+#elif JSON_HEDLEY_HAS_CPP_ATTRIBUTE(nodiscard)
+    #define JSON_HEDLEY_WARN_UNUSED_RESULT JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[nodiscard]])
+    #define JSON_HEDLEY_WARN_UNUSED_RESULT_MSG(msg) JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[nodiscard]])
+#elif defined(_Check_return_) /* SAL */
+    #define JSON_HEDLEY_WARN_UNUSED_RESULT _Check_return_
+    #define JSON_HEDLEY_WARN_UNUSED_RESULT_MSG(msg) _Check_return_
+#else
+    #define JSON_HEDLEY_WARN_UNUSED_RESULT
+    #define JSON_HEDLEY_WARN_UNUSED_RESULT_MSG(msg)
+#endif
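+
+/* Illustrative sketch (not part of the upstream Hedley header): a status code
+   the caller must not ignore, e.g.
+
+       JSON_HEDLEY_WARN_UNUSED_RESULT int try_open(const char *path);   // hypothetical
+
+   Discarding the return value then warns on compilers that support the
+   attribute (or [[nodiscard]]). */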
+
+#if defined(JSON_HEDLEY_SENTINEL)
+    #undef JSON_HEDLEY_SENTINEL
+#endif
+#if \
+    JSON_HEDLEY_HAS_ATTRIBUTE(sentinel) || \
+    JSON_HEDLEY_GCC_VERSION_CHECK(4,0,0) || \
+    JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \
+    JSON_HEDLEY_ARM_VERSION_CHECK(5,4,0) || \
+    JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10)
+    #define JSON_HEDLEY_SENTINEL(position) __attribute__((__sentinel__(position)))
+#else
+    #define JSON_HEDLEY_SENTINEL(position)
+#endif
+
+#if defined(JSON_HEDLEY_NO_RETURN)
+    #undef JSON_HEDLEY_NO_RETURN
+#endif
+#if JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0)
+    #define JSON_HEDLEY_NO_RETURN __noreturn
+#elif \
+    JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \
+    JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10)
+    #define JSON_HEDLEY_NO_RETURN __attribute__((__noreturn__))
+#elif defined(__STDC_VERSION__) && __STDC_VERSION__ >= 201112L
+    #define JSON_HEDLEY_NO_RETURN _Noreturn
+#elif defined(__cplusplus) && (__cplusplus >= 201103L)
+    #define JSON_HEDLEY_NO_RETURN JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[noreturn]])
+#elif \
+    JSON_HEDLEY_HAS_ATTRIBUTE(noreturn) || \
+    JSON_HEDLEY_GCC_VERSION_CHECK(3,2,0) || \
+    JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \
+    JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \
+    JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \
+    JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \
+    (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \
+    (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \
+    (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \
+    (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \
+    JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \
+    JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \
+    JSON_HEDLEY_IAR_VERSION_CHECK(8,10,0)
+    #define JSON_HEDLEY_NO_RETURN __attribute__((__noreturn__))
+#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0)
+    #define JSON_HEDLEY_NO_RETURN _Pragma("does_not_return")
+#elif \
+    JSON_HEDLEY_MSVC_VERSION_CHECK(13,10,0) || \
+    JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0)
+    #define JSON_HEDLEY_NO_RETURN __declspec(noreturn)
+#elif JSON_HEDLEY_TI_CL6X_VERSION_CHECK(6,0,0) && defined(__cplusplus)
+    #define JSON_HEDLEY_NO_RETURN _Pragma("FUNC_NEVER_RETURNS;")
+#elif JSON_HEDLEY_COMPCERT_VERSION_CHECK(3,2,0)
+    #define JSON_HEDLEY_NO_RETURN __attribute((noreturn))
+#elif JSON_HEDLEY_PELLES_VERSION_CHECK(9,0,0)
+    #define JSON_HEDLEY_NO_RETURN __declspec(noreturn)
+#else
+    #define JSON_HEDLEY_NO_RETURN
+#endif
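+
+/* Illustrative sketch (not part of the upstream Hedley header): marking a
+   function that never returns helps the optimizer and avoids spurious
+   "control reaches end of non-void function" warnings in callers, e.g.
+
+       JSON_HEDLEY_NO_RETURN void fatal(const char *msg);   // hypothetical
+*/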
+
+#if defined(JSON_HEDLEY_NO_ESCAPE)
+    #undef JSON_HEDLEY_NO_ESCAPE
+#endif
+#if JSON_HEDLEY_HAS_ATTRIBUTE(noescape)
+    #define JSON_HEDLEY_NO_ESCAPE __attribute__((__noescape__))
+#else
+    #define JSON_HEDLEY_NO_ESCAPE
+#endif
+
+#if defined(JSON_HEDLEY_UNREACHABLE)
+    #undef JSON_HEDLEY_UNREACHABLE
+#endif
+#if defined(JSON_HEDLEY_UNREACHABLE_RETURN)
+    #undef JSON_HEDLEY_UNREACHABLE_RETURN
+#endif
+#if defined(JSON_HEDLEY_ASSUME)
+    #undef JSON_HEDLEY_ASSUME
+#endif
+#if \
+    JSON_HEDLEY_MSVC_VERSION_CHECK(13,10,0) || \
+    JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \
+    JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0)
+    #define JSON_HEDLEY_ASSUME(expr) __assume(expr)
+#elif JSON_HEDLEY_HAS_BUILTIN(__builtin_assume)
+    #define JSON_HEDLEY_ASSUME(expr) __builtin_assume(expr)
+#elif \
+    JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,2,0) || \
+    JSON_HEDLEY_TI_CL6X_VERSION_CHECK(4,0,0)
+    #if defined(__cplusplus)
+        #define JSON_HEDLEY_ASSUME(expr) std::_nassert(expr)
+    #else
+        #define JSON_HEDLEY_ASSUME(expr) _nassert(expr)
+    #endif
+#endif
+#if \
+    (JSON_HEDLEY_HAS_BUILTIN(__builtin_unreachable) && (!defined(JSON_HEDLEY_ARM_VERSION))) || \
+    JSON_HEDLEY_GCC_VERSION_CHECK(4,5,0) || \
+    JSON_HEDLEY_PGI_VERSION_CHECK(18,10,0) || \
+    JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \
+    JSON_HEDLEY_IBM_VERSION_CHECK(13,1,5) || \
+    JSON_HEDLEY_CRAY_VERSION_CHECK(10,0,0) || \
+    JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10)
+    #define JSON_HEDLEY_UNREACHABLE() __builtin_unreachable()
+#elif defined(JSON_HEDLEY_ASSUME)
+    #define JSON_HEDLEY_UNREACHABLE() JSON_HEDLEY_ASSUME(0)
+#endif
+#if !defined(JSON_HEDLEY_ASSUME)
+    #if defined(JSON_HEDLEY_UNREACHABLE)
+        #define JSON_HEDLEY_ASSUME(expr) JSON_HEDLEY_STATIC_CAST(void, ((expr) ? 1 : (JSON_HEDLEY_UNREACHABLE(), 1)))
+    #else
+        #define JSON_HEDLEY_ASSUME(expr) JSON_HEDLEY_STATIC_CAST(void, expr)
+    #endif
+#endif
+#if defined(JSON_HEDLEY_UNREACHABLE)
+    #if  \
+        JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,2,0) || \
+        JSON_HEDLEY_TI_CL6X_VERSION_CHECK(4,0,0)
+        #define JSON_HEDLEY_UNREACHABLE_RETURN(value) return (JSON_HEDLEY_STATIC_CAST(void, JSON_HEDLEY_ASSUME(0)), (value))
+    #else
+        #define JSON_HEDLEY_UNREACHABLE_RETURN(value) JSON_HEDLEY_UNREACHABLE()
+    #endif
+#else
+    #define JSON_HEDLEY_UNREACHABLE_RETURN(value) return (value)
+#endif
+#if !defined(JSON_HEDLEY_UNREACHABLE)
+    #define JSON_HEDLEY_UNREACHABLE() JSON_HEDLEY_ASSUME(0)
+#endif
+
+JSON_HEDLEY_DIAGNOSTIC_PUSH
+#if JSON_HEDLEY_HAS_WARNING("-Wpedantic")
+    #pragma clang diagnostic ignored "-Wpedantic"
+#endif
+#if JSON_HEDLEY_HAS_WARNING("-Wc++98-compat-pedantic") && defined(__cplusplus)
+    #pragma clang diagnostic ignored "-Wc++98-compat-pedantic"
+#endif
+#if JSON_HEDLEY_GCC_HAS_WARNING("-Wvariadic-macros",4,0,0)
+    #if defined(__clang__)
+        #pragma clang diagnostic ignored "-Wvariadic-macros"
+    #elif defined(JSON_HEDLEY_GCC_VERSION)
+        #pragma GCC diagnostic ignored "-Wvariadic-macros"
+    #endif
+#endif
+#if defined(JSON_HEDLEY_NON_NULL)
+    #undef JSON_HEDLEY_NON_NULL
+#endif
+#if \
+    JSON_HEDLEY_HAS_ATTRIBUTE(nonnull) || \
+    JSON_HEDLEY_GCC_VERSION_CHECK(3,3,0) || \
+    JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \
+    JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0)
+    #define JSON_HEDLEY_NON_NULL(...) __attribute__((__nonnull__(__VA_ARGS__)))
+#else
+    #define JSON_HEDLEY_NON_NULL(...)
+#endif
+JSON_HEDLEY_DIAGNOSTIC_POP
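+
+/* Illustrative sketch (not part of the upstream Hedley header): the variadic
+   arguments are 1-based positions of pointer parameters that must not be
+   NULL, e.g.
+
+       JSON_HEDLEY_NON_NULL(1, 2)
+       int copy_str(char *dst, const char *src, size_t n);   // hypothetical
+*/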
+
+#if defined(JSON_HEDLEY_PRINTF_FORMAT)
+    #undef JSON_HEDLEY_PRINTF_FORMAT
+#endif
+#if defined(__MINGW32__) && JSON_HEDLEY_GCC_HAS_ATTRIBUTE(format,4,4,0) && !defined(__USE_MINGW_ANSI_STDIO)
+    #define JSON_HEDLEY_PRINTF_FORMAT(string_idx,first_to_check) __attribute__((__format__(ms_printf, string_idx, first_to_check)))
+#elif defined(__MINGW32__) && JSON_HEDLEY_GCC_HAS_ATTRIBUTE(format,4,4,0) && defined(__USE_MINGW_ANSI_STDIO)
+    #define JSON_HEDLEY_PRINTF_FORMAT(string_idx,first_to_check) __attribute__((__format__(gnu_printf, string_idx, first_to_check)))
+#elif \
+    JSON_HEDLEY_HAS_ATTRIBUTE(format) || \
+    JSON_HEDLEY_GCC_VERSION_CHECK(3,1,0) || \
+    JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \
+    JSON_HEDLEY_ARM_VERSION_CHECK(5,6,0) || \
+    JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \
+    JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \
+    (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \
+    (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \
+    (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \
+    (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \
+    JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \
+    JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \
+    JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10)
+    #define JSON_HEDLEY_PRINTF_FORMAT(string_idx,first_to_check) __attribute__((__format__(__printf__, string_idx, first_to_check)))
+#elif JSON_HEDLEY_PELLES_VERSION_CHECK(6,0,0)
+    #define JSON_HEDLEY_PRINTF_FORMAT(string_idx,first_to_check) __declspec(vaformat(printf,string_idx,first_to_check))
+#else
+    #define JSON_HEDLEY_PRINTF_FORMAT(string_idx,first_to_check)
+#endif
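+
+/* Illustrative sketch (not part of the upstream Hedley header): string_idx is
+   the 1-based position of the format string and first_to_check the first
+   variadic argument to validate against it, e.g.
+
+       JSON_HEDLEY_PRINTF_FORMAT(2, 3)
+       void log_msg(int level, const char *fmt, ...);   // hypothetical
+*/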
+
+#if defined(JSON_HEDLEY_CONSTEXPR)
+    #undef JSON_HEDLEY_CONSTEXPR
+#endif
+#if defined(__cplusplus)
+    #if __cplusplus >= 201103L
+        #define JSON_HEDLEY_CONSTEXPR JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(constexpr)
+    #endif
+#endif
+#if !defined(JSON_HEDLEY_CONSTEXPR)
+    #define JSON_HEDLEY_CONSTEXPR
+#endif
+
+#if defined(JSON_HEDLEY_PREDICT)
+    #undef JSON_HEDLEY_PREDICT
+#endif
+#if defined(JSON_HEDLEY_LIKELY)
+    #undef JSON_HEDLEY_LIKELY
+#endif
+#if defined(JSON_HEDLEY_UNLIKELY)
+    #undef JSON_HEDLEY_UNLIKELY
+#endif
+#if defined(JSON_HEDLEY_UNPREDICTABLE)
+    #undef JSON_HEDLEY_UNPREDICTABLE
+#endif
+#if JSON_HEDLEY_HAS_BUILTIN(__builtin_unpredictable)
+    #define JSON_HEDLEY_UNPREDICTABLE(expr) __builtin_unpredictable((expr))
+#endif
+#if \
+  (JSON_HEDLEY_HAS_BUILTIN(__builtin_expect_with_probability) && !defined(JSON_HEDLEY_PGI_VERSION)) || \
+  JSON_HEDLEY_GCC_VERSION_CHECK(9,0,0) || \
+  JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10)
+#  define JSON_HEDLEY_PREDICT(expr, value, probability) __builtin_expect_with_probability(  (expr), (value), (probability))
+#  define JSON_HEDLEY_PREDICT_TRUE(expr, probability)   __builtin_expect_with_probability(!!(expr),    1   , (probability))
+#  define JSON_HEDLEY_PREDICT_FALSE(expr, probability)  __builtin_expect_with_probability(!!(expr),    0   , (probability))
+#  define JSON_HEDLEY_LIKELY(expr)                      __builtin_expect                 (!!(expr),    1                  )
+#  define JSON_HEDLEY_UNLIKELY(expr)                    __builtin_expect                 (!!(expr),    0                  )
+#elif \
+  (JSON_HEDLEY_HAS_BUILTIN(__builtin_expect) && !defined(JSON_HEDLEY_INTEL_CL_VERSION)) || \
+  JSON_HEDLEY_GCC_VERSION_CHECK(3,0,0) || \
+  JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \
+  (JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,15,0) && defined(__cplusplus)) || \
+  JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \
+  JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \
+  JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \
+  JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,7,0) || \
+  JSON_HEDLEY_TI_CL430_VERSION_CHECK(3,1,0) || \
+  JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,1,0) || \
+  JSON_HEDLEY_TI_CL6X_VERSION_CHECK(6,1,0) || \
+  JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \
+  JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \
+  JSON_HEDLEY_TINYC_VERSION_CHECK(0,9,27) || \
+  JSON_HEDLEY_CRAY_VERSION_CHECK(8,1,0) || \
+  JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10)
+#  define JSON_HEDLEY_PREDICT(expr, expected, probability) \
+    (((probability) >= 0.9) ? __builtin_expect((expr), (expected)) : (JSON_HEDLEY_STATIC_CAST(void, expected), (expr)))
+#  define JSON_HEDLEY_PREDICT_TRUE(expr, probability) \
+    (__extension__ ({ \
+        double hedley_probability_ = (probability); \
+        ((hedley_probability_ >= 0.9) ? __builtin_expect(!!(expr), 1) : ((hedley_probability_ <= 0.1) ? __builtin_expect(!!(expr), 0) : !!(expr))); \
+    }))
+#  define JSON_HEDLEY_PREDICT_FALSE(expr, probability) \
+    (__extension__ ({ \
+        double hedley_probability_ = (probability); \
+        ((hedley_probability_ >= 0.9) ? __builtin_expect(!!(expr), 0) : ((hedley_probability_ <= 0.1) ? __builtin_expect(!!(expr), 1) : !!(expr))); \
+    }))
+#  define JSON_HEDLEY_LIKELY(expr)   __builtin_expect(!!(expr), 1)
+#  define JSON_HEDLEY_UNLIKELY(expr) __builtin_expect(!!(expr), 0)
+#else
+#  define JSON_HEDLEY_PREDICT(expr, expected, probability) (JSON_HEDLEY_STATIC_CAST(void, expected), (expr))
+#  define JSON_HEDLEY_PREDICT_TRUE(expr, probability) (!!(expr))
+#  define JSON_HEDLEY_PREDICT_FALSE(expr, probability) (!!(expr))
+#  define JSON_HEDLEY_LIKELY(expr) (!!(expr))
+#  define JSON_HEDLEY_UNLIKELY(expr) (!!(expr))
+#endif
+#if !defined(JSON_HEDLEY_UNPREDICTABLE)
+    #define JSON_HEDLEY_UNPREDICTABLE(expr) JSON_HEDLEY_PREDICT(expr, 1, 0.5)
+#endif
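+
+/* Illustrative sketch (not part of the upstream Hedley header): branch hints
+   for the optimizer; the value of the expression is unchanged, e.g.
+
+       if (JSON_HEDLEY_UNLIKELY(buf == NULL)) { return -1; }
+       if (JSON_HEDLEY_PREDICT_TRUE(n > 0, 0.99)) { consume(buf, n); }   // hypothetical consume()
+*/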
+
+#if defined(JSON_HEDLEY_MALLOC)
+    #undef JSON_HEDLEY_MALLOC
+#endif
+#if \
+    JSON_HEDLEY_HAS_ATTRIBUTE(malloc) || \
+    JSON_HEDLEY_GCC_VERSION_CHECK(3,1,0) || \
+    JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \
+    JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \
+    JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \
+    JSON_HEDLEY_IBM_VERSION_CHECK(12,1,0) || \
+    JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \
+    (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \
+    (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \
+    (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \
+    (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \
+    JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \
+    JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \
+    JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10)
+    #define JSON_HEDLEY_MALLOC __attribute__((__malloc__))
+#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0)
+    #define JSON_HEDLEY_MALLOC _Pragma("returns_new_memory")
+#elif \
+    JSON_HEDLEY_MSVC_VERSION_CHECK(14,0,0) || \
+    JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0)
+    #define JSON_HEDLEY_MALLOC __declspec(restrict)
+#else
+    #define JSON_HEDLEY_MALLOC
+#endif
+
+#if defined(JSON_HEDLEY_PURE)
+    #undef JSON_HEDLEY_PURE
+#endif
+#if \
+  JSON_HEDLEY_HAS_ATTRIBUTE(pure) || \
+  JSON_HEDLEY_GCC_VERSION_CHECK(2,96,0) || \
+  JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \
+  JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \
+  JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \
+  JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \
+  JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \
+  (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+  JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \
+  (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+  JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \
+  (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+  JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \
+  (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+  JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \
+  JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \
+  JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \
+  JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) || \
+  JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10)
+#  define JSON_HEDLEY_PURE __attribute__((__pure__))
+#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0)
+#  define JSON_HEDLEY_PURE _Pragma("does_not_write_global_data")
+#elif defined(__cplusplus) && \
+    ( \
+      JSON_HEDLEY_TI_CL430_VERSION_CHECK(2,0,1) || \
+      JSON_HEDLEY_TI_CL6X_VERSION_CHECK(4,0,0) || \
+      JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) \
+    )
+#  define JSON_HEDLEY_PURE _Pragma("FUNC_IS_PURE;")
+#else
+#  define JSON_HEDLEY_PURE
+#endif
+
+#if defined(JSON_HEDLEY_CONST)
+    #undef JSON_HEDLEY_CONST
+#endif
+#if \
+    JSON_HEDLEY_HAS_ATTRIBUTE(const) || \
+    JSON_HEDLEY_GCC_VERSION_CHECK(2,5,0) || \
+    JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \
+    JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \
+    JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \
+    JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \
+    JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \
+    (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \
+    (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \
+    (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \
+    (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \
+    JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \
+    JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \
+    JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) || \
+    JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10)
+    #define JSON_HEDLEY_CONST __attribute__((__const__))
+#elif \
+    JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0)
+    #define JSON_HEDLEY_CONST _Pragma("no_side_effect")
+#else
+    #define JSON_HEDLEY_CONST JSON_HEDLEY_PURE
+#endif
+
+#if defined(JSON_HEDLEY_RESTRICT)
+    #undef JSON_HEDLEY_RESTRICT
+#endif
+#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) && !defined(__cplusplus)
+    #define JSON_HEDLEY_RESTRICT restrict
+#elif \
+    JSON_HEDLEY_GCC_VERSION_CHECK(3,1,0) || \
+    JSON_HEDLEY_MSVC_VERSION_CHECK(14,0,0) || \
+    JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \
+    JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) || \
+    JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \
+    JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \
+    JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) || \
+    JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \
+    JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,2,4) || \
+    JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,1,0) || \
+    JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \
+    (JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,14,0) && defined(__cplusplus)) || \
+    JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) || \
+    defined(__clang__) || \
+    JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10)
+    #define JSON_HEDLEY_RESTRICT __restrict
+#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,3,0) && !defined(__cplusplus)
+    #define JSON_HEDLEY_RESTRICT _Restrict
+#else
+    #define JSON_HEDLEY_RESTRICT
+#endif
+
+#if defined(JSON_HEDLEY_INLINE)
+    #undef JSON_HEDLEY_INLINE
+#endif
+#if \
+    (defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L)) || \
+    (defined(__cplusplus) && (__cplusplus >= 199711L))
+    #define JSON_HEDLEY_INLINE inline
+#elif \
+    defined(JSON_HEDLEY_GCC_VERSION) || \
+    JSON_HEDLEY_ARM_VERSION_CHECK(6,2,0)
+    #define JSON_HEDLEY_INLINE __inline__
+#elif \
+    JSON_HEDLEY_MSVC_VERSION_CHECK(12,0,0) || \
+    JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) || \
+    JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \
+    JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,1,0) || \
+    JSON_HEDLEY_TI_CL430_VERSION_CHECK(3,1,0) || \
+    JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,2,0) || \
+    JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,0,0) || \
+    JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \
+    JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \
+    JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10)
+    #define JSON_HEDLEY_INLINE __inline
+#else
+    #define JSON_HEDLEY_INLINE
+#endif
+
+#if defined(JSON_HEDLEY_ALWAYS_INLINE)
+    #undef JSON_HEDLEY_ALWAYS_INLINE
+#endif
+#if \
+  JSON_HEDLEY_HAS_ATTRIBUTE(always_inline) || \
+  JSON_HEDLEY_GCC_VERSION_CHECK(4,0,0) || \
+  JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \
+  JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \
+  JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \
+  JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \
+  JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \
+  (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+  JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \
+  (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+  JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \
+  (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+  JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \
+  (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+  JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \
+  JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \
+  JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \
+  JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) || \
+  JSON_HEDLEY_IAR_VERSION_CHECK(8,10,0)
+#  define JSON_HEDLEY_ALWAYS_INLINE __attribute__((__always_inline__)) JSON_HEDLEY_INLINE
+#elif \
+  JSON_HEDLEY_MSVC_VERSION_CHECK(12,0,0) || \
+  JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0)
+#  define JSON_HEDLEY_ALWAYS_INLINE __forceinline
+#elif defined(__cplusplus) && \
+    ( \
+      JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \
+      JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \
+      JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \
+      JSON_HEDLEY_TI_CL6X_VERSION_CHECK(6,1,0) || \
+      JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \
+      JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) \
+    )
+#  define JSON_HEDLEY_ALWAYS_INLINE _Pragma("FUNC_ALWAYS_INLINE;")
+#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0)
+#  define JSON_HEDLEY_ALWAYS_INLINE _Pragma("inline=forced")
+#else
+#  define JSON_HEDLEY_ALWAYS_INLINE JSON_HEDLEY_INLINE
+#endif
+
+#if defined(JSON_HEDLEY_NEVER_INLINE)
+    #undef JSON_HEDLEY_NEVER_INLINE
+#endif
+#if \
+    JSON_HEDLEY_HAS_ATTRIBUTE(noinline) || \
+    JSON_HEDLEY_GCC_VERSION_CHECK(4,0,0) || \
+    JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \
+    JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \
+    JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \
+    JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \
+    JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \
+    (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \
+    (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \
+    (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \
+    (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+    JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \
+    JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \
+    JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \
+    JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) || \
+    JSON_HEDLEY_IAR_VERSION_CHECK(8,10,0)
+    #define JSON_HEDLEY_NEVER_INLINE __attribute__((__noinline__))
+#elif \
+    JSON_HEDLEY_MSVC_VERSION_CHECK(13,10,0) || \
+    JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0)
+    #define JSON_HEDLEY_NEVER_INLINE __declspec(noinline)
+#elif JSON_HEDLEY_PGI_VERSION_CHECK(10,2,0)
+    #define JSON_HEDLEY_NEVER_INLINE _Pragma("noinline")
+#elif JSON_HEDLEY_TI_CL6X_VERSION_CHECK(6,0,0) && defined(__cplusplus)
+    #define JSON_HEDLEY_NEVER_INLINE _Pragma("FUNC_CANNOT_INLINE;")
+#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0)
+    #define JSON_HEDLEY_NEVER_INLINE _Pragma("inline=never")
+#elif JSON_HEDLEY_COMPCERT_VERSION_CHECK(3,2,0)
+    #define JSON_HEDLEY_NEVER_INLINE __attribute((noinline))
+#elif JSON_HEDLEY_PELLES_VERSION_CHECK(9,0,0)
+    #define JSON_HEDLEY_NEVER_INLINE __declspec(noinline)
+#else
+    #define JSON_HEDLEY_NEVER_INLINE
+#endif
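+
+/* Illustrative sketch (not part of the upstream Hedley header): the inline
+   controls map to the strongest hint each compiler offers; on compilers with
+   no equivalent they degrade to plain JSON_HEDLEY_INLINE or nothing, e.g.
+
+       JSON_HEDLEY_ALWAYS_INLINE static int clamp01(int v) { return v < 0 ? 0 : (v > 1 ? 1 : v); }
+       JSON_HEDLEY_NEVER_INLINE  static void report_error(void);   // hypothetical cold path
+*/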
+
+#if defined(JSON_HEDLEY_PRIVATE)
+    #undef JSON_HEDLEY_PRIVATE
+#endif
+#if defined(JSON_HEDLEY_PUBLIC)
+    #undef JSON_HEDLEY_PUBLIC
+#endif
+#if defined(JSON_HEDLEY_IMPORT)
+    #undef JSON_HEDLEY_IMPORT
+#endif
+#if defined(_WIN32) || defined(__CYGWIN__)
+#  define JSON_HEDLEY_PRIVATE
+#  define JSON_HEDLEY_PUBLIC   __declspec(dllexport)
+#  define JSON_HEDLEY_IMPORT   __declspec(dllimport)
+#else
+#  if \
+    JSON_HEDLEY_HAS_ATTRIBUTE(visibility) || \
+    JSON_HEDLEY_GCC_VERSION_CHECK(3,3,0) || \
+    JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \
+    JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \
+    JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \
+    JSON_HEDLEY_IBM_VERSION_CHECK(13,1,0) || \
+    ( \
+      defined(__TI_EABI__) && \
+      ( \
+        (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \
+        JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) \
+      ) \
+    ) || \
+    JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10)
+#    define JSON_HEDLEY_PRIVATE __attribute__((__visibility__("hidden")))
+#    define JSON_HEDLEY_PUBLIC  __attribute__((__visibility__("default")))
+#  else
+#    define JSON_HEDLEY_PRIVATE
+#    define JSON_HEDLEY_PUBLIC
+#  endif
+#  define JSON_HEDLEY_IMPORT    extern
+#endif
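+
+/* Illustrative sketch (not part of the upstream Hedley header):
+   JSON_HEDLEY_PUBLIC marks a symbol for export (__declspec(dllexport) on
+   Windows, default ELF visibility elsewhere), JSON_HEDLEY_PRIVATE hides it
+   where visibility attributes exist, and JSON_HEDLEY_IMPORT marks an imported
+   declaration, e.g.
+
+       JSON_HEDLEY_PUBLIC  int  lib_init(void);        // hypothetical exported entry point
+       JSON_HEDLEY_PRIVATE void lib_reset_state(void); // hypothetical internal helper
+*/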
+
+#if defined(JSON_HEDLEY_NO_THROW)
+    #undef JSON_HEDLEY_NO_THROW
+#endif
+#if \
+    JSON_HEDLEY_HAS_ATTRIBUTE(nothrow) || \
+    JSON_HEDLEY_GCC_VERSION_CHECK(3,3,0) || \
+    JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \
+    JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10)
+    #define JSON_HEDLEY_NO_THROW __attribute__((__nothrow__))
+#elif \
+    JSON_HEDLEY_MSVC_VERSION_CHECK(13,1,0) || \
+    JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) || \
+    JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0)
+    #define JSON_HEDLEY_NO_THROW __declspec(nothrow)
+#else
+    #define JSON_HEDLEY_NO_THROW
+#endif
+
+#if defined(JSON_HEDLEY_FALL_THROUGH)
+    #undef JSON_HEDLEY_FALL_THROUGH
+#endif
+#if \
+    JSON_HEDLEY_HAS_ATTRIBUTE(fallthrough) || \
+    JSON_HEDLEY_GCC_VERSION_CHECK(7,0,0) || \
+    JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10)
+    #define JSON_HEDLEY_FALL_THROUGH __attribute__((__fallthrough__))
+#elif JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS(clang,fallthrough)
+    #define JSON_HEDLEY_FALL_THROUGH JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[clang::fallthrough]])
+#elif JSON_HEDLEY_HAS_CPP_ATTRIBUTE(fallthrough)
+    #define JSON_HEDLEY_FALL_THROUGH JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[fallthrough]])
+#elif defined(__fallthrough) /* SAL */
+    #define JSON_HEDLEY_FALL_THROUGH __fallthrough
+#else
+    #define JSON_HEDLEY_FALL_THROUGH
+#endif
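+
+/* Illustrative sketch (not part of the upstream Hedley header): placed where
+   one switch case intentionally falls into the next to silence
+   implicit-fallthrough warnings, e.g.
+
+       case 'v': verbose = 1; JSON_HEDLEY_FALL_THROUGH;
+       case 'q': configure(verbose); break;   // hypothetical configure()
+*/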
+
+#if defined(JSON_HEDLEY_RETURNS_NON_NULL)
+    #undef JSON_HEDLEY_RETURNS_NON_NULL
+#endif
+#if \
+    JSON_HEDLEY_HAS_ATTRIBUTE(returns_nonnull) || \
+    JSON_HEDLEY_GCC_VERSION_CHECK(4,9,0) || \
+    JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10)
+    #define JSON_HEDLEY_RETURNS_NON_NULL __attribute__((__returns_nonnull__))
+#elif defined(_Ret_notnull_) /* SAL */
+    #define JSON_HEDLEY_RETURNS_NON_NULL _Ret_notnull_
+#else
+    #define JSON_HEDLEY_RETURNS_NON_NULL
+#endif
+
+#if defined(JSON_HEDLEY_ARRAY_PARAM)
+    #undef JSON_HEDLEY_ARRAY_PARAM
+#endif
+#if \
+    defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) && \
+    !defined(__STDC_NO_VLA__) && \
+    !defined(__cplusplus) && \
+    !defined(JSON_HEDLEY_PGI_VERSION) && \
+    !defined(JSON_HEDLEY_TINYC_VERSION)
+    #define JSON_HEDLEY_ARRAY_PARAM(name) (name)
+#else
+    #define JSON_HEDLEY_ARRAY_PARAM(name)
+#endif
+
+#if defined(JSON_HEDLEY_IS_CONSTANT)
+    #undef JSON_HEDLEY_IS_CONSTANT
+#endif
+#if defined(JSON_HEDLEY_REQUIRE_CONSTEXPR)
+    #undef JSON_HEDLEY_REQUIRE_CONSTEXPR
+#endif
+/* JSON_HEDLEY_IS_CONSTEXPR_ is for
+   HEDLEY INTERNAL USE ONLY.  API subject to change without notice. */
+#if defined(JSON_HEDLEY_IS_CONSTEXPR_)
+    #undef JSON_HEDLEY_IS_CONSTEXPR_
+#endif
+#if \
+    JSON_HEDLEY_HAS_BUILTIN(__builtin_constant_p) || \
+    JSON_HEDLEY_GCC_VERSION_CHECK(3,4,0) || \
+    JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \
+    JSON_HEDLEY_TINYC_VERSION_CHECK(0,9,19) || \
+    JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \
+    JSON_HEDLEY_IBM_VERSION_CHECK(13,1,0) || \
+    JSON_HEDLEY_TI_CL6X_VERSION_CHECK(6,1,0) || \
+    (JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0) && !defined(__cplusplus)) || \
+    JSON_HEDLEY_CRAY_VERSION_CHECK(8,1,0) || \
+    JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10)
+    #define JSON_HEDLEY_IS_CONSTANT(expr) __builtin_constant_p(expr)
+#endif
+#if !defined(__cplusplus)
+#  if \
+       JSON_HEDLEY_HAS_BUILTIN(__builtin_types_compatible_p) || \
+       JSON_HEDLEY_GCC_VERSION_CHECK(3,4,0) || \
+       JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \
+       JSON_HEDLEY_IBM_VERSION_CHECK(13,1,0) || \
+       JSON_HEDLEY_CRAY_VERSION_CHECK(8,1,0) || \
+       JSON_HEDLEY_ARM_VERSION_CHECK(5,4,0) || \
+       JSON_HEDLEY_TINYC_VERSION_CHECK(0,9,24)
+#if defined(__INTPTR_TYPE__)
+    #define JSON_HEDLEY_IS_CONSTEXPR_(expr) __builtin_types_compatible_p(__typeof__((1 ? (void*) ((__INTPTR_TYPE__) ((expr) * 0)) : (int*) 0)), int*)
+#else
+    #include <stdint.h>
+    #define JSON_HEDLEY_IS_CONSTEXPR_(expr) __builtin_types_compatible_p(__typeof__((1 ? (void*) ((intptr_t) ((expr) * 0)) : (int*) 0)), int*)
+#endif
+#  elif \
+       ( \
+          defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L) && \
+          !defined(JSON_HEDLEY_SUNPRO_VERSION) && \
+          !defined(JSON_HEDLEY_PGI_VERSION) && \
+          !defined(JSON_HEDLEY_IAR_VERSION)) || \
+       (JSON_HEDLEY_HAS_EXTENSION(c_generic_selections) && !defined(JSON_HEDLEY_IAR_VERSION)) || \
+       JSON_HEDLEY_GCC_VERSION_CHECK(4,9,0) || \
+       JSON_HEDLEY_INTEL_VERSION_CHECK(17,0,0) || \
+       JSON_HEDLEY_IBM_VERSION_CHECK(12,1,0) || \
+       JSON_HEDLEY_ARM_VERSION_CHECK(5,3,0)
+#if defined(__INTPTR_TYPE__)
+    #define JSON_HEDLEY_IS_CONSTEXPR_(expr) _Generic((1 ? (void*) ((__INTPTR_TYPE__) ((expr) * 0)) : (int*) 0), int*: 1, void*: 0)
+#else
+    #include <stdint.h>
+    #define JSON_HEDLEY_IS_CONSTEXPR_(expr) _Generic((1 ? (void*) ((intptr_t) ((expr) * 0)) : (int*) 0), int*: 1, void*: 0)
+#endif
+#  elif \
+       defined(JSON_HEDLEY_GCC_VERSION) || \
+       defined(JSON_HEDLEY_INTEL_VERSION) || \
+       defined(JSON_HEDLEY_TINYC_VERSION) || \
+       defined(JSON_HEDLEY_TI_ARMCL_VERSION) || \
+       JSON_HEDLEY_TI_CL430_VERSION_CHECK(18,12,0) || \
+       defined(JSON_HEDLEY_TI_CL2000_VERSION) || \
+       defined(JSON_HEDLEY_TI_CL6X_VERSION) || \
+       defined(JSON_HEDLEY_TI_CL7X_VERSION) || \
+       defined(JSON_HEDLEY_TI_CLPRU_VERSION) || \
+       defined(__clang__)
+#    define JSON_HEDLEY_IS_CONSTEXPR_(expr) ( \
+        sizeof(void) != \
+        sizeof(*( \
+                  1 ? \
+                  ((void*) ((expr) * 0L) ) : \
+((struct { char v[sizeof(void) * 2]; } *) 1) \
+                ) \
+              ) \
+                                            )
+#  endif
+#endif
+#if defined(JSON_HEDLEY_IS_CONSTEXPR_)
+    #if !defined(JSON_HEDLEY_IS_CONSTANT)
+        #define JSON_HEDLEY_IS_CONSTANT(expr) JSON_HEDLEY_IS_CONSTEXPR_(expr)
+    #endif
+    #define JSON_HEDLEY_REQUIRE_CONSTEXPR(expr) (JSON_HEDLEY_IS_CONSTEXPR_(expr) ? (expr) : (-1))
+#else
+    #if !defined(JSON_HEDLEY_IS_CONSTANT)
+        #define JSON_HEDLEY_IS_CONSTANT(expr) (0)
+    #endif
+    #define JSON_HEDLEY_REQUIRE_CONSTEXPR(expr) (expr)
+#endif
+
+#if defined(JSON_HEDLEY_BEGIN_C_DECLS)
+    #undef JSON_HEDLEY_BEGIN_C_DECLS
+#endif
+#if defined(JSON_HEDLEY_END_C_DECLS)
+    #undef JSON_HEDLEY_END_C_DECLS
+#endif
+#if defined(JSON_HEDLEY_C_DECL)
+    #undef JSON_HEDLEY_C_DECL
+#endif
+#if defined(__cplusplus)
+    #define JSON_HEDLEY_BEGIN_C_DECLS extern "C" {
+    #define JSON_HEDLEY_END_C_DECLS }
+    #define JSON_HEDLEY_C_DECL extern "C"
+#else
+    #define JSON_HEDLEY_BEGIN_C_DECLS
+    #define JSON_HEDLEY_END_C_DECLS
+    #define JSON_HEDLEY_C_DECL
+#endif
+
+#if defined(JSON_HEDLEY_STATIC_ASSERT)
+    #undef JSON_HEDLEY_STATIC_ASSERT
+#endif
+#if \
+  !defined(__cplusplus) && ( \
+      (defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L)) || \
+      (JSON_HEDLEY_HAS_FEATURE(c_static_assert) && !defined(JSON_HEDLEY_INTEL_CL_VERSION)) || \
+      JSON_HEDLEY_GCC_VERSION_CHECK(6,0,0) || \
+      JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \
+      defined(_Static_assert) \
+    )
+#  define JSON_HEDLEY_STATIC_ASSERT(expr, message) _Static_assert(expr, message)
+#elif \
+  (defined(__cplusplus) && (__cplusplus >= 201103L)) || \
+  JSON_HEDLEY_MSVC_VERSION_CHECK(16,0,0) || \
+  JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0)
+#  define JSON_HEDLEY_STATIC_ASSERT(expr, message) JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(static_assert(expr, message))
+#else
+#  define JSON_HEDLEY_STATIC_ASSERT(expr, message)
+#endif
+
+#if defined(JSON_HEDLEY_NULL)
+    #undef JSON_HEDLEY_NULL
+#endif
+#if defined(__cplusplus)
+    #if __cplusplus >= 201103L
+        #define JSON_HEDLEY_NULL JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(nullptr)
+    #elif defined(NULL)
+        #define JSON_HEDLEY_NULL NULL
+    #else
+        #define JSON_HEDLEY_NULL JSON_HEDLEY_STATIC_CAST(void*, 0)
+    #endif
+#elif defined(NULL)
+    #define JSON_HEDLEY_NULL NULL
+#else
+    #define JSON_HEDLEY_NULL ((void*) 0)
+#endif
+
+#if defined(JSON_HEDLEY_MESSAGE)
+    #undef JSON_HEDLEY_MESSAGE
+#endif
+#if JSON_HEDLEY_HAS_WARNING("-Wunknown-pragmas")
+#  define JSON_HEDLEY_MESSAGE(msg) \
+    JSON_HEDLEY_DIAGNOSTIC_PUSH \
+    JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS \
+    JSON_HEDLEY_PRAGMA(message msg) \
+    JSON_HEDLEY_DIAGNOSTIC_POP
+#elif \
+  JSON_HEDLEY_GCC_VERSION_CHECK(4,4,0) || \
+  JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0)
+#  define JSON_HEDLEY_MESSAGE(msg) JSON_HEDLEY_PRAGMA(message msg)
+#elif JSON_HEDLEY_CRAY_VERSION_CHECK(5,0,0)
+#  define JSON_HEDLEY_MESSAGE(msg) JSON_HEDLEY_PRAGMA(_CRI message msg)
+#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0)
+#  define JSON_HEDLEY_MESSAGE(msg) JSON_HEDLEY_PRAGMA(message(msg))
+#elif JSON_HEDLEY_PELLES_VERSION_CHECK(2,0,0)
+#  define JSON_HEDLEY_MESSAGE(msg) JSON_HEDLEY_PRAGMA(message(msg))
+#else
+#  define JSON_HEDLEY_MESSAGE(msg)
+#endif
+
+#if defined(JSON_HEDLEY_WARNING)
+    #undef JSON_HEDLEY_WARNING
+#endif
+#if JSON_HEDLEY_HAS_WARNING("-Wunknown-pragmas")
+#  define JSON_HEDLEY_WARNING(msg) \
+    JSON_HEDLEY_DIAGNOSTIC_PUSH \
+    JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS \
+    JSON_HEDLEY_PRAGMA(clang warning msg) \
+    JSON_HEDLEY_DIAGNOSTIC_POP
+#elif \
+  JSON_HEDLEY_GCC_VERSION_CHECK(4,8,0) || \
+  JSON_HEDLEY_PGI_VERSION_CHECK(18,4,0) || \
+  JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0)
+#  define JSON_HEDLEY_WARNING(msg) JSON_HEDLEY_PRAGMA(GCC warning msg)
+#elif \
+  JSON_HEDLEY_MSVC_VERSION_CHECK(15,0,0) || \
+  JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0)
+#  define JSON_HEDLEY_WARNING(msg) JSON_HEDLEY_PRAGMA(message(msg))
+#else
+#  define JSON_HEDLEY_WARNING(msg) JSON_HEDLEY_MESSAGE(msg)
+#endif
+
+#if defined(JSON_HEDLEY_REQUIRE)
+    #undef JSON_HEDLEY_REQUIRE
+#endif
+#if defined(JSON_HEDLEY_REQUIRE_MSG)
+    #undef JSON_HEDLEY_REQUIRE_MSG
+#endif
+#if JSON_HEDLEY_HAS_ATTRIBUTE(diagnose_if)
+#  if JSON_HEDLEY_HAS_WARNING("-Wgcc-compat")
+#    define JSON_HEDLEY_REQUIRE(expr) \
+    JSON_HEDLEY_DIAGNOSTIC_PUSH \
+    _Pragma("clang diagnostic ignored \"-Wgcc-compat\"") \
+    __attribute__((diagnose_if(!(expr), #expr, "error"))) \
+    JSON_HEDLEY_DIAGNOSTIC_POP
+#    define JSON_HEDLEY_REQUIRE_MSG(expr,msg) \
+    JSON_HEDLEY_DIAGNOSTIC_PUSH \
+    _Pragma("clang diagnostic ignored \"-Wgcc-compat\"") \
+    __attribute__((diagnose_if(!(expr), msg, "error"))) \
+    JSON_HEDLEY_DIAGNOSTIC_POP
+#  else
+#    define JSON_HEDLEY_REQUIRE(expr) __attribute__((diagnose_if(!(expr), #expr, "error")))
+#    define JSON_HEDLEY_REQUIRE_MSG(expr,msg) __attribute__((diagnose_if(!(expr), msg, "error")))
+#  endif
+#else
+#  define JSON_HEDLEY_REQUIRE(expr)
+#  define JSON_HEDLEY_REQUIRE_MSG(expr,msg)
+#endif
+
+#if defined(JSON_HEDLEY_FLAGS)
+    #undef JSON_HEDLEY_FLAGS
+#endif
+#if JSON_HEDLEY_HAS_ATTRIBUTE(flag_enum) && (!defined(__cplusplus) || JSON_HEDLEY_HAS_WARNING("-Wbitfield-enum-conversion"))
+    #define JSON_HEDLEY_FLAGS __attribute__((__flag_enum__))
+#else
+    #define JSON_HEDLEY_FLAGS
+#endif
+
+#if defined(JSON_HEDLEY_FLAGS_CAST)
+    #undef JSON_HEDLEY_FLAGS_CAST
+#endif
+#if JSON_HEDLEY_INTEL_VERSION_CHECK(19,0,0)
+#  define JSON_HEDLEY_FLAGS_CAST(T, expr) (__extension__ ({ \
+        JSON_HEDLEY_DIAGNOSTIC_PUSH \
+        _Pragma("warning(disable:188)") \
+        ((T) (expr)); \
+        JSON_HEDLEY_DIAGNOSTIC_POP \
+    }))
+#else
+#  define JSON_HEDLEY_FLAGS_CAST(T, expr) JSON_HEDLEY_STATIC_CAST(T, expr)
+#endif
+
+#if defined(JSON_HEDLEY_EMPTY_BASES)
+    #undef JSON_HEDLEY_EMPTY_BASES
+#endif
+#if \
+    (JSON_HEDLEY_MSVC_VERSION_CHECK(19,0,23918) && !JSON_HEDLEY_MSVC_VERSION_CHECK(20,0,0)) || \
+    JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0)
+    #define JSON_HEDLEY_EMPTY_BASES __declspec(empty_bases)
+#else
+    #define JSON_HEDLEY_EMPTY_BASES
+#endif
+
+/* Remaining macros are deprecated. */
+
+#if defined(JSON_HEDLEY_GCC_NOT_CLANG_VERSION_CHECK)
+    #undef JSON_HEDLEY_GCC_NOT_CLANG_VERSION_CHECK
+#endif
+#if defined(__clang__)
+    #define JSON_HEDLEY_GCC_NOT_CLANG_VERSION_CHECK(major,minor,patch) (0)
+#else
+    #define JSON_HEDLEY_GCC_NOT_CLANG_VERSION_CHECK(major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch)
+#endif
+
+#if defined(JSON_HEDLEY_CLANG_HAS_ATTRIBUTE)
+    #undef JSON_HEDLEY_CLANG_HAS_ATTRIBUTE
+#endif
+#define JSON_HEDLEY_CLANG_HAS_ATTRIBUTE(attribute) JSON_HEDLEY_HAS_ATTRIBUTE(attribute)
+
+#if defined(JSON_HEDLEY_CLANG_HAS_CPP_ATTRIBUTE)
+    #undef JSON_HEDLEY_CLANG_HAS_CPP_ATTRIBUTE
+#endif
+#define JSON_HEDLEY_CLANG_HAS_CPP_ATTRIBUTE(attribute) JSON_HEDLEY_HAS_CPP_ATTRIBUTE(attribute)
+
+#if defined(JSON_HEDLEY_CLANG_HAS_BUILTIN)
+    #undef JSON_HEDLEY_CLANG_HAS_BUILTIN
+#endif
+#define JSON_HEDLEY_CLANG_HAS_BUILTIN(builtin) JSON_HEDLEY_HAS_BUILTIN(builtin)
+
+#if defined(JSON_HEDLEY_CLANG_HAS_FEATURE)
+    #undef JSON_HEDLEY_CLANG_HAS_FEATURE
+#endif
+#define JSON_HEDLEY_CLANG_HAS_FEATURE(feature) JSON_HEDLEY_HAS_FEATURE(feature)
+
+#if defined(JSON_HEDLEY_CLANG_HAS_EXTENSION)
+    #undef JSON_HEDLEY_CLANG_HAS_EXTENSION
+#endif
+#define JSON_HEDLEY_CLANG_HAS_EXTENSION(extension) JSON_HEDLEY_HAS_EXTENSION(extension)
+
+#if defined(JSON_HEDLEY_CLANG_HAS_DECLSPEC_DECLSPEC_ATTRIBUTE)
+    #undef JSON_HEDLEY_CLANG_HAS_DECLSPEC_DECLSPEC_ATTRIBUTE
+#endif
+#define JSON_HEDLEY_CLANG_HAS_DECLSPEC_ATTRIBUTE(attribute) JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE(attribute)
+
+#if defined(JSON_HEDLEY_CLANG_HAS_WARNING)
+    #undef JSON_HEDLEY_CLANG_HAS_WARNING
+#endif
+#define JSON_HEDLEY_CLANG_HAS_WARNING(warning) JSON_HEDLEY_HAS_WARNING(warning)
+
+#endif /* !defined(JSON_HEDLEY_VERSION) || (JSON_HEDLEY_VERSION < X) */
+
+
+// This file contains all internal macro definitions (except those affecting ABI)
+// You MUST include macro_unscope.hpp at the end of json.hpp to undef all of them
+
+// #include 
+
+
+// exclude unsupported compilers
+#if !defined(JSON_SKIP_UNSUPPORTED_COMPILER_CHECK)
+    #if defined(__clang__)
+        #if (__clang_major__ * 10000 + __clang_minor__ * 100 + __clang_patchlevel__) < 30400
+            #error "unsupported Clang version - see https://github.com/nlohmann/json#supported-compilers"
+        #endif
+    #elif defined(__GNUC__) && !(defined(__ICC) || defined(__INTEL_COMPILER))
+        #if (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) < 40800
+            #error "unsupported GCC version - see https://github.com/nlohmann/json#supported-compilers"
+        #endif
+    #endif
+#endif
+
+// C++ language standard detection
+// if the user manually specified the used c++ version this is skipped
+#if !defined(JSON_HAS_CPP_20) && !defined(JSON_HAS_CPP_17) && !defined(JSON_HAS_CPP_14) && !defined(JSON_HAS_CPP_11)
+    #if (defined(__cplusplus) && __cplusplus >= 202002L) || (defined(_MSVC_LANG) && _MSVC_LANG >= 202002L)
+        #define JSON_HAS_CPP_20
+        #define JSON_HAS_CPP_17
+        #define JSON_HAS_CPP_14
+    #elif (defined(__cplusplus) && __cplusplus >= 201703L) || (defined(_HAS_CXX17) && _HAS_CXX17 == 1) // fix for issue #464
+        #define JSON_HAS_CPP_17
+        #define JSON_HAS_CPP_14
+    #elif (defined(__cplusplus) && __cplusplus >= 201402L) || (defined(_HAS_CXX14) && _HAS_CXX14 == 1)
+        #define JSON_HAS_CPP_14
+    #endif
+    // the cpp 11 flag is always specified because it is the minimal required version
+    #define JSON_HAS_CPP_11
+#endif
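+// Note (illustrative, not upstream code): the detection above is skipped when one
+// of the JSON_HAS_CPP_* macros is already defined, so a build can pin the assumed
+// standard explicitly, e.g. on the compiler command line:
+//
+//   c++ -std=c++17 -DJSON_HAS_CPP_17 -c example.cpp
+//
+// (sketch of an invocation; the file name is a placeholder)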
+
+#ifdef __has_include
+    #if __has_include(<version>)
+        #include <version>
+    #endif
+#endif
+
+#if !defined(JSON_HAS_FILESYSTEM) && !defined(JSON_HAS_EXPERIMENTAL_FILESYSTEM)
+    #ifdef JSON_HAS_CPP_17
+        #if defined(__cpp_lib_filesystem)
+            #define JSON_HAS_FILESYSTEM 1
+        #elif defined(__cpp_lib_experimental_filesystem)
+            #define JSON_HAS_EXPERIMENTAL_FILESYSTEM 1
+        #elif !defined(__has_include)
+            #define JSON_HAS_EXPERIMENTAL_FILESYSTEM 1
+        #elif __has_include(<filesystem>)
+            #define JSON_HAS_FILESYSTEM 1
+        #elif __has_include(<experimental/filesystem>)
+            #define JSON_HAS_EXPERIMENTAL_FILESYSTEM 1
+        #endif
+
+        // std::filesystem does not work on MinGW GCC 8: https://sourceforge.net/p/mingw-w64/bugs/737/
+        #if defined(__MINGW32__) && defined(__GNUC__) && __GNUC__ == 8
+            #undef JSON_HAS_FILESYSTEM
+            #undef JSON_HAS_EXPERIMENTAL_FILESYSTEM
+        #endif
+
+        // no filesystem support before GCC 8: https://en.cppreference.com/w/cpp/compiler_support
+        #if defined(__GNUC__) && !defined(__clang__) && __GNUC__ < 8
+            #undef JSON_HAS_FILESYSTEM
+            #undef JSON_HAS_EXPERIMENTAL_FILESYSTEM
+        #endif
+
+        // no filesystem support before Clang 7: https://en.cppreference.com/w/cpp/compiler_support
+        #if defined(__clang_major__) && __clang_major__ < 7
+            #undef JSON_HAS_FILESYSTEM
+            #undef JSON_HAS_EXPERIMENTAL_FILESYSTEM
+        #endif
+
+        // no filesystem support before MSVC 19.14: https://en.cppreference.com/w/cpp/compiler_support
+        #if defined(_MSC_VER) && _MSC_VER < 1914
+            #undef JSON_HAS_FILESYSTEM
+            #undef JSON_HAS_EXPERIMENTAL_FILESYSTEM
+        #endif
+
+        // no filesystem support before iOS 13
+        #if defined(__IPHONE_OS_VERSION_MIN_REQUIRED) && __IPHONE_OS_VERSION_MIN_REQUIRED < 130000
+            #undef JSON_HAS_FILESYSTEM
+            #undef JSON_HAS_EXPERIMENTAL_FILESYSTEM
+        #endif
+
+        // no filesystem support before macOS Catalina
+        #if defined(__MAC_OS_X_VERSION_MIN_REQUIRED) && __MAC_OS_X_VERSION_MIN_REQUIRED < 101500
+            #undef JSON_HAS_FILESYSTEM
+            #undef JSON_HAS_EXPERIMENTAL_FILESYSTEM
+        #endif
+    #endif
+#endif
+
+#ifndef JSON_HAS_EXPERIMENTAL_FILESYSTEM
+    #define JSON_HAS_EXPERIMENTAL_FILESYSTEM 0
+#endif
+
+#ifndef JSON_HAS_FILESYSTEM
+    #define JSON_HAS_FILESYSTEM 0
+#endif
+
+#ifndef JSON_HAS_THREE_WAY_COMPARISON
+    #if defined(__cpp_impl_three_way_comparison) && __cpp_impl_three_way_comparison >= 201907L \
+        && defined(__cpp_lib_three_way_comparison) && __cpp_lib_three_way_comparison >= 201907L
+        #define JSON_HAS_THREE_WAY_COMPARISON 1
+    #else
+        #define JSON_HAS_THREE_WAY_COMPARISON 0
+    #endif
+#endif
+
+#ifndef JSON_HAS_RANGES
+    // ranges header shipping in GCC 11.1.0 (released 2021-04-27) has syntax error
+    #if defined(__GLIBCXX__) && __GLIBCXX__ == 20210427
+        #define JSON_HAS_RANGES 0
+    #elif defined(__cpp_lib_ranges)
+        #define JSON_HAS_RANGES 1
+    #else
+        #define JSON_HAS_RANGES 0
+    #endif
+#endif
+
+#ifndef JSON_HAS_STATIC_RTTI
+    #if !defined(_HAS_STATIC_RTTI) || _HAS_STATIC_RTTI != 0
+        #define JSON_HAS_STATIC_RTTI 1
+    #else
+        #define JSON_HAS_STATIC_RTTI 0
+    #endif
+#endif
+
+#ifdef JSON_HAS_CPP_17
+    #define JSON_INLINE_VARIABLE inline
+#else
+    #define JSON_INLINE_VARIABLE
+#endif
+
+#if JSON_HEDLEY_HAS_ATTRIBUTE(no_unique_address)
+    #define JSON_NO_UNIQUE_ADDRESS [[no_unique_address]]
+#else
+    #define JSON_NO_UNIQUE_ADDRESS
+#endif
+
+// disable documentation warnings on clang
+#if defined(__clang__)
+    #pragma clang diagnostic push
+    #pragma clang diagnostic ignored "-Wdocumentation"
+    #pragma clang diagnostic ignored "-Wdocumentation-unknown-command"
+#endif
+
+// allow disabling exceptions
+#if (defined(__cpp_exceptions) || defined(__EXCEPTIONS) || defined(_CPPUNWIND)) && !defined(JSON_NOEXCEPTION)
+    #define JSON_THROW(exception) throw exception
+    #define JSON_TRY try
+    #define JSON_CATCH(exception) catch(exception)
+    #define JSON_INTERNAL_CATCH(exception) catch(exception)
+#else
+    #include <cstdlib>
+    #define JSON_THROW(exception) std::abort()
+    #define JSON_TRY if(true)
+    #define JSON_CATCH(exception) if(false)
+    #define JSON_INTERNAL_CATCH(exception) if(false)
+#endif
+
+// override exception macros
+#if defined(JSON_THROW_USER)
+    #undef JSON_THROW
+    #define JSON_THROW JSON_THROW_USER
+#endif
+#if defined(JSON_TRY_USER)
+    #undef JSON_TRY
+    #define JSON_TRY JSON_TRY_USER
+#endif
+#if defined(JSON_CATCH_USER)
+    #undef JSON_CATCH
+    #define JSON_CATCH JSON_CATCH_USER
+    #undef JSON_INTERNAL_CATCH
+    #define JSON_INTERNAL_CATCH JSON_CATCH_USER
+#endif
+#if defined(JSON_INTERNAL_CATCH_USER)
+    #undef JSON_INTERNAL_CATCH
+    #define JSON_INTERNAL_CATCH JSON_INTERNAL_CATCH_USER
+#endif
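+// Example (illustrative only; `my_error_handler` is a hypothetical user function):
+// defining the *_USER hooks before this header is included swaps in custom
+// behavior for the macros above, e.g.
+//
+//   #define JSON_THROW_USER(exception) my_error_handler(exception)
+//   #define JSON_TRY_USER if(true)
+//   #define JSON_CATCH_USER(exception) if(false)
+//   #include "json.hpp"   // path depends on how this vendored header is consumed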
+
+// allow overriding assert
+#if !defined(JSON_ASSERT)
+    #include <cassert> // assert
+    #define JSON_ASSERT(x) assert(x)
+#endif
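+// Example (illustrative): JSON_ASSERT can be redefined the same way before the
+// include, e.g. to log instead of calling assert():
+//
+//   #define JSON_ASSERT(x) do { if(!(x)) { my_log_failure(#x); } } while(0)
+//
+// (`my_log_failure` is a hypothetical function used only for illustration)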
+
+// allow to access some private functions (needed by the test suite)
+#if defined(JSON_TESTS_PRIVATE)
+    #define JSON_PRIVATE_UNLESS_TESTED public
+#else
+    #define JSON_PRIVATE_UNLESS_TESTED private
+#endif
+
+/*!
+@brief macro to briefly define a mapping between an enum and JSON
+@def NLOHMANN_JSON_SERIALIZE_ENUM
+@since version 3.4.0
+*/
+#define NLOHMANN_JSON_SERIALIZE_ENUM(ENUM_TYPE, ...)                                            \
+    template<typename BasicJsonType>                                                            \
+    inline void to_json(BasicJsonType& j, const ENUM_TYPE& e)                                   \
+    {                                                                                           \
+        static_assert(std::is_enum<ENUM_TYPE>::value, #ENUM_TYPE " must be an enum!");          \
+        static const std::pair<ENUM_TYPE, BasicJsonType> m[] = __VA_ARGS__;                     \
+        auto it = std::find_if(std::begin(m), std::end(m),                                      \
+                               [e](const std::pair<ENUM_TYPE, BasicJsonType>& ej_pair) -> bool  \
+        {                                                                                       \
+            return ej_pair.first == e;                                                          \
+        });                                                                                     \
+        j = ((it != std::end(m)) ? it : std::begin(m))->second;                                 \
+    }                                                                                           \
+    template<typename BasicJsonType>                                                            \
+    inline void from_json(const BasicJsonType& j, ENUM_TYPE& e)                                 \
+    {                                                                                           \
+        static_assert(std::is_enum<ENUM_TYPE>::value, #ENUM_TYPE " must be an enum!");          \
+        static const std::pair<ENUM_TYPE, BasicJsonType> m[] = __VA_ARGS__;                     \
+        auto it = std::find_if(std::begin(m), std::end(m),                                      \
+                               [&j](const std::pair<ENUM_TYPE, BasicJsonType>& ej_pair) -> bool \
+        {                                                                                       \
+            return ej_pair.second == j;                                                         \
+        });                                                                                     \
+        e = ((it != std::end(m)) ? it : std::begin(m))->first;                                  \
+    }
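+// Usage example (illustrative; `Color` is a hypothetical enum, not part of this header):
+//
+//   enum class Color { red, green, blue, unknown };
+//   NLOHMANN_JSON_SERIALIZE_ENUM(Color, {
+//       { Color::unknown, nullptr },
+//       { Color::red, "red" },
+//       { Color::green, "green" },
+//       { Color::blue, "blue" },
+//   })
+//
+// Unknown values map to the first pair in the list, because the lookups above
+// fall back to std::begin(m) when no entry matches.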
+
+// Ugly macros to avoid uglier copy-paste when specializing basic_json. They
+// may be removed in the future once the class is split.
+
+#define NLOHMANN_BASIC_JSON_TPL_DECLARATION                                \
+    template<template<typename, typename, typename...> class ObjectType,   \
+             template<typename, typename...> class ArrayType,              \
+             class StringType, class BooleanType, class NumberIntegerType, \
+             class NumberUnsignedType, class NumberFloatType,              \
+             template<typename> class AllocatorType,                       \
+             template<typename, typename = void> class JSONSerializer,     \
+             class BinaryType,                                             \
+             class CustomBaseClass>
+
+#define NLOHMANN_BASIC_JSON_TPL                                            \
+    basic_json<ObjectType, ArrayType, StringType, BooleanType,             \
+    NumberIntegerType, NumberUnsignedType, NumberFloatType,                \
+    AllocatorType, JSONSerializer, BinaryType, CustomBaseClass>
+
+// Macros to simplify conversion from/to types
+
+#define NLOHMANN_JSON_EXPAND( x ) x
+#define NLOHMANN_JSON_GET_MACRO(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22, _23, _24, _25, _26, _27, _28, _29, _30, _31, _32, _33, _34, _35, _36, _37, _38, _39, _40, _41, _42, _43, _44, _45, _46, _47, _48, _49, _50, _51, _52, _53, _54, _55, _56, _57, _58, _59, _60, _61, _62, _63, _64, NAME,...) NAME
+#define NLOHMANN_JSON_PASTE(...) NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_GET_MACRO(__VA_ARGS__, \
+        NLOHMANN_JSON_PASTE64, \
+        NLOHMANN_JSON_PASTE63, \
+        NLOHMANN_JSON_PASTE62, \
+        NLOHMANN_JSON_PASTE61, \
+        NLOHMANN_JSON_PASTE60, \
+        NLOHMANN_JSON_PASTE59, \
+        NLOHMANN_JSON_PASTE58, \
+        NLOHMANN_JSON_PASTE57, \
+        NLOHMANN_JSON_PASTE56, \
+        NLOHMANN_JSON_PASTE55, \
+        NLOHMANN_JSON_PASTE54, \
+        NLOHMANN_JSON_PASTE53, \
+        NLOHMANN_JSON_PASTE52, \
+        NLOHMANN_JSON_PASTE51, \
+        NLOHMANN_JSON_PASTE50, \
+        NLOHMANN_JSON_PASTE49, \
+        NLOHMANN_JSON_PASTE48, \
+        NLOHMANN_JSON_PASTE47, \
+        NLOHMANN_JSON_PASTE46, \
+        NLOHMANN_JSON_PASTE45, \
+        NLOHMANN_JSON_PASTE44, \
+        NLOHMANN_JSON_PASTE43, \
+        NLOHMANN_JSON_PASTE42, \
+        NLOHMANN_JSON_PASTE41, \
+        NLOHMANN_JSON_PASTE40, \
+        NLOHMANN_JSON_PASTE39, \
+        NLOHMANN_JSON_PASTE38, \
+        NLOHMANN_JSON_PASTE37, \
+        NLOHMANN_JSON_PASTE36, \
+        NLOHMANN_JSON_PASTE35, \
+        NLOHMANN_JSON_PASTE34, \
+        NLOHMANN_JSON_PASTE33, \
+        NLOHMANN_JSON_PASTE32, \
+        NLOHMANN_JSON_PASTE31, \
+        NLOHMANN_JSON_PASTE30, \
+        NLOHMANN_JSON_PASTE29, \
+        NLOHMANN_JSON_PASTE28, \
+        NLOHMANN_JSON_PASTE27, \
+        NLOHMANN_JSON_PASTE26, \
+        NLOHMANN_JSON_PASTE25, \
+        NLOHMANN_JSON_PASTE24, \
+        NLOHMANN_JSON_PASTE23, \
+        NLOHMANN_JSON_PASTE22, \
+        NLOHMANN_JSON_PASTE21, \
+        NLOHMANN_JSON_PASTE20, \
+        NLOHMANN_JSON_PASTE19, \
+        NLOHMANN_JSON_PASTE18, \
+        NLOHMANN_JSON_PASTE17, \
+        NLOHMANN_JSON_PASTE16, \
+        NLOHMANN_JSON_PASTE15, \
+        NLOHMANN_JSON_PASTE14, \
+        NLOHMANN_JSON_PASTE13, \
+        NLOHMANN_JSON_PASTE12, \
+        NLOHMANN_JSON_PASTE11, \
+        NLOHMANN_JSON_PASTE10, \
+        NLOHMANN_JSON_PASTE9, \
+        NLOHMANN_JSON_PASTE8, \
+        NLOHMANN_JSON_PASTE7, \
+        NLOHMANN_JSON_PASTE6, \
+        NLOHMANN_JSON_PASTE5, \
+        NLOHMANN_JSON_PASTE4, \
+        NLOHMANN_JSON_PASTE3, \
+        NLOHMANN_JSON_PASTE2, \
+        NLOHMANN_JSON_PASTE1)(__VA_ARGS__))
+#define NLOHMANN_JSON_PASTE2(func, v1) func(v1)
+#define NLOHMANN_JSON_PASTE3(func, v1, v2) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE2(func, v2)
+#define NLOHMANN_JSON_PASTE4(func, v1, v2, v3) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE3(func, v2, v3)
+#define NLOHMANN_JSON_PASTE5(func, v1, v2, v3, v4) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE4(func, v2, v3, v4)
+#define NLOHMANN_JSON_PASTE6(func, v1, v2, v3, v4, v5) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE5(func, v2, v3, v4, v5)
+#define NLOHMANN_JSON_PASTE7(func, v1, v2, v3, v4, v5, v6) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE6(func, v2, v3, v4, v5, v6)
+#define NLOHMANN_JSON_PASTE8(func, v1, v2, v3, v4, v5, v6, v7) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE7(func, v2, v3, v4, v5, v6, v7)
+#define NLOHMANN_JSON_PASTE9(func, v1, v2, v3, v4, v5, v6, v7, v8) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE8(func, v2, v3, v4, v5, v6, v7, v8)
+#define NLOHMANN_JSON_PASTE10(func, v1, v2, v3, v4, v5, v6, v7, v8, v9) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE9(func, v2, v3, v4, v5, v6, v7, v8, v9)
+#define NLOHMANN_JSON_PASTE11(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE10(func, v2, v3, v4, v5, v6, v7, v8, v9, v10)
+#define NLOHMANN_JSON_PASTE12(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE11(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11)
+#define NLOHMANN_JSON_PASTE13(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE12(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12)
+#define NLOHMANN_JSON_PASTE14(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE13(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13)
+#define NLOHMANN_JSON_PASTE15(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE14(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14)
+#define NLOHMANN_JSON_PASTE16(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE15(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15)
+#define NLOHMANN_JSON_PASTE17(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE16(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16)
+#define NLOHMANN_JSON_PASTE18(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE17(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17)
+#define NLOHMANN_JSON_PASTE19(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE18(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18)
+#define NLOHMANN_JSON_PASTE20(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE19(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19)
+#define NLOHMANN_JSON_PASTE21(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE20(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20)
+#define NLOHMANN_JSON_PASTE22(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE21(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21)
+#define NLOHMANN_JSON_PASTE23(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE22(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22)
+#define NLOHMANN_JSON_PASTE24(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE23(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23)
+#define NLOHMANN_JSON_PASTE25(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE24(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24)
+#define NLOHMANN_JSON_PASTE26(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE25(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25)
+#define NLOHMANN_JSON_PASTE27(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE26(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26)
+#define NLOHMANN_JSON_PASTE28(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE27(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27)
+#define NLOHMANN_JSON_PASTE29(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE28(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28)
+#define NLOHMANN_JSON_PASTE30(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE29(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29)
+#define NLOHMANN_JSON_PASTE31(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE30(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30)
+#define NLOHMANN_JSON_PASTE32(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE31(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31)
+#define NLOHMANN_JSON_PASTE33(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE32(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32)
+#define NLOHMANN_JSON_PASTE34(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE33(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33)
+#define NLOHMANN_JSON_PASTE35(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE34(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34)
+#define NLOHMANN_JSON_PASTE36(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE35(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35)
+#define NLOHMANN_JSON_PASTE37(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE36(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36)
+#define NLOHMANN_JSON_PASTE38(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE37(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37)
+#define NLOHMANN_JSON_PASTE39(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE38(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38)
+#define NLOHMANN_JSON_PASTE40(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE39(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39)
+#define NLOHMANN_JSON_PASTE41(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE40(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40)
+#define NLOHMANN_JSON_PASTE42(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE41(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41)
+#define NLOHMANN_JSON_PASTE43(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE42(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42)
+#define NLOHMANN_JSON_PASTE44(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE43(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43)
+#define NLOHMANN_JSON_PASTE45(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE44(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44)
+#define NLOHMANN_JSON_PASTE46(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE45(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45)
+#define NLOHMANN_JSON_PASTE47(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE46(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46)
+#define NLOHMANN_JSON_PASTE48(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE47(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47)
+#define NLOHMANN_JSON_PASTE49(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE48(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48)
+#define NLOHMANN_JSON_PASTE50(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE49(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49)
+#define NLOHMANN_JSON_PASTE51(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE50(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50)
+#define NLOHMANN_JSON_PASTE52(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE51(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51)
+#define NLOHMANN_JSON_PASTE53(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE52(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52)
+#define NLOHMANN_JSON_PASTE54(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE53(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53)
+#define NLOHMANN_JSON_PASTE55(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE54(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54)
+#define NLOHMANN_JSON_PASTE56(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE55(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55)
+#define NLOHMANN_JSON_PASTE57(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE56(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56)
+#define NLOHMANN_JSON_PASTE58(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE57(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57)
+#define NLOHMANN_JSON_PASTE59(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE58(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58)
+#define NLOHMANN_JSON_PASTE60(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE59(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59)
+#define NLOHMANN_JSON_PASTE61(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE60(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60)
+#define NLOHMANN_JSON_PASTE62(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60, v61) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE61(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60, v61)
+#define NLOHMANN_JSON_PASTE63(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60, v61, v62) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE62(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60, v61, v62)
+#define NLOHMANN_JSON_PASTE64(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60, v61, v62, v63) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE63(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60, v61, v62, v63)
+
+#define NLOHMANN_JSON_TO(v1) nlohmann_json_j[#v1] = nlohmann_json_t.v1;
+#define NLOHMANN_JSON_FROM(v1) nlohmann_json_j.at(#v1).get_to(nlohmann_json_t.v1);
+#define NLOHMANN_JSON_FROM_WITH_DEFAULT(v1) nlohmann_json_t.v1 = nlohmann_json_j.value(#v1, nlohmann_json_default_obj.v1);
+
+/*!
+@brief macro
+@def NLOHMANN_DEFINE_TYPE_INTRUSIVE
+@since version 3.9.0
+*/
+#define NLOHMANN_DEFINE_TYPE_INTRUSIVE(Type, ...)  \
+    friend void to_json(nlohmann::json& nlohmann_json_j, const Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_TO, __VA_ARGS__)) } \
+    friend void from_json(const nlohmann::json& nlohmann_json_j, Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_FROM, __VA_ARGS__)) }
+
+#define NLOHMANN_DEFINE_TYPE_INTRUSIVE_WITH_DEFAULT(Type, ...)  \
+    friend void to_json(nlohmann::json& nlohmann_json_j, const Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_TO, __VA_ARGS__)) } \
+    friend void from_json(const nlohmann::json& nlohmann_json_j, Type& nlohmann_json_t) { const Type nlohmann_json_default_obj{}; NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_FROM_WITH_DEFAULT, __VA_ARGS__)) }
+
+#define NLOHMANN_DEFINE_TYPE_INTRUSIVE_ONLY_SERIALIZE(Type, ...)  \
+    friend void to_json(nlohmann::json& nlohmann_json_j, const Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_TO, __VA_ARGS__)) }
+
+/*!
+@brief macro
+@def NLOHMANN_DEFINE_TYPE_NON_INTRUSIVE
+@since version 3.9.0
+*/
+#define NLOHMANN_DEFINE_TYPE_NON_INTRUSIVE(Type, ...)  \
+    inline void to_json(nlohmann::json& nlohmann_json_j, const Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_TO, __VA_ARGS__)) } \
+    inline void from_json(const nlohmann::json& nlohmann_json_j, Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_FROM, __VA_ARGS__)) }
+
+#define NLOHMANN_DEFINE_TYPE_NON_INTRUSIVE_ONLY_SERIALIZE(Type, ...)  \
+    inline void to_json(nlohmann::json& nlohmann_json_j, const Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_TO, __VA_ARGS__)) }
+
+#define NLOHMANN_DEFINE_TYPE_NON_INTRUSIVE_WITH_DEFAULT(Type, ...)  \
+    inline void to_json(nlohmann::json& nlohmann_json_j, const Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_TO, __VA_ARGS__)) } \
+    inline void from_json(const nlohmann::json& nlohmann_json_j, Type& nlohmann_json_t) { const Type nlohmann_json_default_obj{}; NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_FROM_WITH_DEFAULT, __VA_ARGS__)) }
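+// Usage example (illustrative; `Person` is a hypothetical type, not part of this header):
+//
+//   struct Person { std::string name; int age = 0; };
+//   NLOHMANN_DEFINE_TYPE_NON_INTRUSIVE(Person, name, age)
+//
+//   nlohmann::json j = Person{"Ada", 36};  // uses the generated to_json
+//   Person p = j.get<Person>();            // uses the generated from_json
+//
+// The *_WITH_DEFAULT variants fall back to a default-constructed object's member
+// values when a key is missing, instead of throwing.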
+
+// inspired from https://stackoverflow.com/a/26745591
+// allows to call any std function as if (e.g. with begin):
+// using std::begin; begin(x);
+//
+// it allows using the detected idiom to retrieve the return type
+// of such an expression
+#define NLOHMANN_CAN_CALL_STD_FUNC_IMPL(std_name)                                 \
+    namespace detail {                                                            \
+    using std::std_name;                                                          \
+    \
+    template<typename... T>                                                       \
+    using result_of_##std_name = decltype(std_name(std::declval<T>()...));        \
+    }                                                                             \
+    \
+    namespace detail2 {                                                           \
+    struct std_name##_tag                                                         \
+    {                                                                             \
+    };                                                                            \
+    \
+    template<typename... T>                                                       \
+    std_name##_tag std_name(T&&...);                                              \
+    \
+    template<typename... T>                                                       \
+    using result_of_##std_name = decltype(std_name(std::declval<T>()...));        \
+    \
+    template<typename... T>                                                       \
+    struct would_call_std_##std_name                                              \
+    {                                                                             \
+        static constexpr auto const value = ::nlohmann::detail::                  \
+                                            is_detected_exact<std_name##_tag,     \
+                                            result_of_##std_name, T...>::value;   \
+    };                                                                            \
+    } /* namespace detail2 */ \
+    \
+    template<typename... T>                                                       \
+    struct would_call_std_##std_name : detail2::would_call_std_##std_name<T...>   \
+    {                                                                             \
+    }
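+// Illustration (sketch): instantiating the macro as
+//
+//   NLOHMANN_CAN_CALL_STD_FUNC_IMPL(begin);
+//
+// produces the trait would_call_std_begin<T...>, whose value reports whether an
+// unqualified call written as `using std::begin; begin(x);` would resolve to the
+// std:: overload rather than to a user-supplied overload found via ADL.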
+
+#ifndef JSON_USE_IMPLICIT_CONVERSIONS
+    #define JSON_USE_IMPLICIT_CONVERSIONS 1
+#endif
+
+#if JSON_USE_IMPLICIT_CONVERSIONS
+    #define JSON_EXPLICIT
+#else
+    #define JSON_EXPLICIT explicit
+#endif
+
+#ifndef JSON_DISABLE_ENUM_SERIALIZATION
+    #define JSON_DISABLE_ENUM_SERIALIZATION 0
+#endif
+
+#ifndef JSON_USE_GLOBAL_UDLS
+    #define JSON_USE_GLOBAL_UDLS 1
+#endif
+
+#if JSON_HAS_THREE_WAY_COMPARISON
+    #include <compare> // partial_ordering
+#endif
+
+NLOHMANN_JSON_NAMESPACE_BEGIN
+namespace detail
+{
+
+///////////////////////////
+// JSON type enumeration //
+///////////////////////////
+
+/*!
+@brief the JSON type enumeration
+
+This enumeration collects the different JSON types. It is internally used to
+distinguish the stored values, and the functions @ref basic_json::is_null(),
+@ref basic_json::is_object(), @ref basic_json::is_array(),
+@ref basic_json::is_string(), @ref basic_json::is_boolean(),
+@ref basic_json::is_number() (with @ref basic_json::is_number_integer(),
+@ref basic_json::is_number_unsigned(), and @ref basic_json::is_number_float()),
+@ref basic_json::is_discarded(), @ref basic_json::is_primitive(), and
+@ref basic_json::is_structured() rely on it.
+
+@note There are three enumeration entries (number_integer, number_unsigned, and
+number_float), because the library distinguishes these three types for numbers:
+@ref basic_json::number_unsigned_t is used for unsigned integers,
+@ref basic_json::number_integer_t is used for signed integers, and
+@ref basic_json::number_float_t is used for floating-point numbers or to
+approximate integers which do not fit in the limits of their respective type.
+
+@sa see @ref basic_json::basic_json(const value_t value_type) -- create a JSON
+value with the default value for a given type
+
+@since version 1.0.0
+*/
+enum class value_t : std::uint8_t
+{
+    null,             ///< null value
+    object,           ///< object (unordered set of name/value pairs)
+    array,            ///< array (ordered collection of values)
+    string,           ///< string value
+    boolean,          ///< boolean value
+    number_integer,   ///< number value (signed integer)
+    number_unsigned,  ///< number value (unsigned integer)
+    number_float,     ///< number value (floating-point)
+    binary,           ///< binary array (ordered collection of bytes)
+    discarded         ///< discarded by the parser callback function
+};
+
+/*!
+@brief comparison operator for JSON types
+
+Returns an ordering that is similar to Python:
+- order: null < boolean < number < object < array < string < binary
+- furthermore, each type is not smaller than itself
+- discarded values are not comparable
+- binary is represented as a b"" string in python and directly comparable to a
+  string; however, making a binary array directly comparable with a string would
+  be surprising behavior in a JSON file.
+
+@since version 1.0.0
+*/
+#if JSON_HAS_THREE_WAY_COMPARISON
+    inline std::partial_ordering operator<=>(const value_t lhs, const value_t rhs) noexcept // *NOPAD*
+#else
+    inline bool operator<(const value_t lhs, const value_t rhs) noexcept
+#endif
+{
+    static constexpr std::array<std::uint8_t, 9> order = {{
+            0 /* null */, 3 /* object */, 4 /* array */, 5 /* string */,
+            1 /* boolean */, 2 /* integer */, 2 /* unsigned */, 2 /* float */,
+            6 /* binary */
+        }
+    };
+
+    const auto l_index = static_cast<std::size_t>(lhs);
+    const auto r_index = static_cast<std::size_t>(rhs);
+#if JSON_HAS_THREE_WAY_COMPARISON
+    if (l_index < order.size() && r_index < order.size())
+    {
+        return order[l_index] <=> order[r_index]; // *NOPAD*
+    }
+    return std::partial_ordering::unordered;
+#else
+    return l_index < order.size() && r_index < order.size() && order[l_index] < order[r_index];
+#endif
+}
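+// Example (illustrative), using the order array above:
+//   value_t::null   < value_t::boolean  -> true  (0 < 1)
+//   value_t::string < value_t::object   -> false (5 > 3)
+// Comparisons involving value_t::discarded are unordered (three-way) or false,
+// since discarded has no entry in the order array.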
+
+// GCC selects the built-in operator< over an operator rewritten from
+// a user-defined spaceship operator
+// Clang, MSVC, and ICC select the rewritten candidate
+// (see GCC bug https://gcc.gnu.org/bugzilla/show_bug.cgi?id=105200)
+#if JSON_HAS_THREE_WAY_COMPARISON && defined(__GNUC__)
+inline bool operator<(const value_t lhs, const value_t rhs) noexcept
+{
+    return std::is_lt(lhs <=> rhs); // *NOPAD*
+}
+#endif
+
+}  // namespace detail
+NLOHMANN_JSON_NAMESPACE_END
+
+// #include 
+//     __ _____ _____ _____
+//  __|  |   __|     |   | |  JSON for Modern C++
+// |  |  |__   |  |  | | | |  version 3.11.3
+// |_____|_____|_____|_|___|  https://github.com/nlohmann/json
+//
+// SPDX-FileCopyrightText: 2013-2023 Niels Lohmann <https://nlohmann.me>
+// SPDX-License-Identifier: MIT
+
+
+
+// #include 
+
+
+NLOHMANN_JSON_NAMESPACE_BEGIN
+namespace detail
+{
+
+/*!
+@brief replace all occurrences of a substring by another string
+
+@param[in,out] s  the string to manipulate; changed so that all
+               occurrences of @a f are replaced with @a t
+@param[in]     f  the substring to replace with @a t
+@param[in]     t  the string to replace @a f
+
+@pre The search string @a f must not be empty. **This precondition is
+enforced with an assertion.**
+
+@since version 2.0.0
+*/
+template<typename StringType>
+inline void replace_substring(StringType& s, const StringType& f,
+                              const StringType& t)
+{
+    JSON_ASSERT(!f.empty());
+    for (auto pos = s.find(f);                // find first occurrence of f
+            pos != StringType::npos;          // make sure f was found
+            s.replace(pos, f.size(), t),      // replace with t, and
+            pos = s.find(f, pos + t.size()))  // find next occurrence of f
+    {}
+}
+
+/*!
+ * @brief string escaping as described in RFC 6901 (Sect. 4)
+ * @param[in] s string to escape
+ * @return    escaped string
+ *
+ * Note the order of escaping "~" to "~0" and "/" to "~1" is important.
+ */
+template<typename StringType>
+inline StringType escape(StringType s)
+{
+    replace_substring(s, StringType{"~"}, StringType{"~0"});
+    replace_substring(s, StringType{"/"}, StringType{"~1"});
+    return s;
+}
+
+/*!
+ * @brief string unescaping as described in RFC 6901 (Sect. 4)
+ * @param[in] s string to unescape
+ * @return    unescaped string
+ *
+ * Note the order of escaping "~1" to "/" and "~0" to "~" is important.
+ */
+template<typename StringType>
+static void unescape(StringType& s)
+{
+    replace_substring(s, StringType{"~1"}, StringType{"/"});
+    replace_substring(s, StringType{"~0"}, StringType{"~"});
+}
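+
+// Illustrative sketch (editorial note, not part of the upstream sources): for a
+// JSON Pointer reference token, the two helpers above round-trip as follows:
+//
+//   escape("a/b~c")     -->  "a~1b~0c"
+//   unescape("a~1b~0c") -->  "a/b~c"
+//
+// Escaping "~" before "/" matters: replacing "/" first would let the second pass
+// rewrite the fresh "~1" into "~01".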
+
+}  // namespace detail
+NLOHMANN_JSON_NAMESPACE_END
+
+// #include <nlohmann/detail/input/position_t.hpp>
+//     __ _____ _____ _____
+//  __|  |   __|     |   | |  JSON for Modern C++
+// |  |  |__   |  |  | | | |  version 3.11.3
+// |_____|_____|_____|_|___|  https://github.com/nlohmann/json
+//
+// SPDX-FileCopyrightText: 2013-2023 Niels Lohmann <https://nlohmann.me>
+// SPDX-License-Identifier: MIT
+
+
+
+#include <cstddef> // size_t
+
+// #include <nlohmann/detail/abi_macros.hpp>
+
+
+NLOHMANN_JSON_NAMESPACE_BEGIN
+namespace detail
+{
+
+/// struct to capture the start position of the current token
+struct position_t
+{
+    /// the total number of characters read
+    std::size_t chars_read_total = 0;
+    /// the number of characters read in the current line
+    std::size_t chars_read_current_line = 0;
+    /// the number of lines read
+    std::size_t lines_read = 0;
+
+    /// conversion to size_t to preserve SAX interface
+    constexpr operator size_t() const
+    {
+        return chars_read_total;
+    }
+};
+
+}  // namespace detail
+NLOHMANN_JSON_NAMESPACE_END
+
+// #include <nlohmann/detail/macro_scope.hpp>
+
+// #include <nlohmann/detail/meta/cpp_future.hpp>
+//     __ _____ _____ _____
+//  __|  |   __|     |   | |  JSON for Modern C++
+// |  |  |__   |  |  | | | |  version 3.11.3
+// |_____|_____|_____|_|___|  https://github.com/nlohmann/json
+//
+// SPDX-FileCopyrightText: 2013-2023 Niels Lohmann <https://nlohmann.me>
+// SPDX-FileCopyrightText: 2018 The Abseil Authors
+// SPDX-License-Identifier: MIT
+
+
+
+#include <array> // array
+#include <cstddef> // size_t
+#include <type_traits> // conditional, enable_if, false_type, integral_constant, is_constructible, is_integral, is_same, remove_cv, remove_reference, true_type
+#include <utility> // index_sequence, make_index_sequence, index_sequence_for
+
+// #include <nlohmann/detail/macro_scope.hpp>
+
+
+NLOHMANN_JSON_NAMESPACE_BEGIN
+namespace detail
+{
+
+template<typename T>
+using uncvref_t = typename std::remove_cv<typename std::remove_reference<T>::type>::type;
+
+#ifdef JSON_HAS_CPP_14
+
+// the following utilities are natively available in C++14
+using std::enable_if_t;
+using std::index_sequence;
+using std::make_index_sequence;
+using std::index_sequence_for;
+
+#else
+
+// alias templates to reduce boilerplate
+template<bool B, typename T = void>
+using enable_if_t = typename std::enable_if<B, T>::type;
+
+// The following code is taken from https://github.com/abseil/abseil-cpp/blob/10cb35e459f5ecca5b2ff107635da0bfa41011b4/absl/utility/utility.h
+// which is part of Google Abseil (https://github.com/abseil/abseil-cpp), licensed under the Apache License 2.0.
+
+//// START OF CODE FROM GOOGLE ABSEIL
+
+// integer_sequence
+//
+// Class template representing a compile-time integer sequence. An instantiation
+// of `integer_sequence<T, Ints...>` has a sequence of integers encoded in its
+// type through its template arguments (which is a common need when
+// working with C++11 variadic templates). `absl::integer_sequence` is designed
+// to be a drop-in replacement for C++14's `std::integer_sequence`.
+//
+// Example:
+//
+//   template< class T, T... Ints >
+//   void user_function(integer_sequence<T, Ints...>);
+//
+//   int main()
+//   {
+//     // user_function's `T` will be deduced to `int` and `Ints...`
+//     // will be deduced to `0, 1, 2, 3, 4`.
+//     user_function(make_integer_sequence<int, 5>());
+//   }
+template <typename T, T... Ints>
+struct integer_sequence
+{
+    using value_type = T;
+    static constexpr std::size_t size() noexcept
+    {
+        return sizeof...(Ints);
+    }
+};
+
+// index_sequence
+//
+// A helper template for an `integer_sequence` of `size_t`,
+// `absl::index_sequence` is designed to be a drop-in replacement for C++14's
+// `std::index_sequence`.
+template <size_t... Ints>
+using index_sequence = integer_sequence<size_t, Ints...>;
+
+namespace utility_internal
+{
+
+template <typename Seq, size_t SeqSize, size_t Rem>
+struct Extend;
+
+// Note that SeqSize == sizeof...(Ints). It's passed explicitly for efficiency.
+template <typename T, T... Ints, size_t SeqSize>
+struct Extend<integer_sequence<T, Ints...>, SeqSize, 0>
+{
+    using type = integer_sequence < T, Ints..., (Ints + SeqSize)... >;
+};
+
+template <typename T, T... Ints, size_t SeqSize>
+struct Extend<integer_sequence<T, Ints...>, SeqSize, 1>
+{
+    using type = integer_sequence < T, Ints..., (Ints + SeqSize)..., 2 * SeqSize >;
+};
+
+// Recursion helper for 'make_integer_sequence<T, N>'.
+// 'Gen<T, N>::type' is an alias for 'integer_sequence<T, 0, 1, ... N-1>'.
+template <typename T, size_t N>
+struct Gen
+{
+    using type =
+        typename Extend < typename Gen < T, N / 2 >::type, N / 2, N % 2 >::type;
+};
+
+template <typename T>
+struct Gen<T, 0>
+{
+    using type = integer_sequence<T>;
+};
+
+}  // namespace utility_internal
+
+// Compile-time sequences of integers
+
+// make_integer_sequence
+//
+// This template alias is equivalent to
+// `integer_sequence<int, 0, 1, ..., N-1>`, and is designed to be a drop-in
+// replacement for C++14's `std::make_integer_sequence`.
+template <typename T, T N>
+using make_integer_sequence = typename utility_internal::Gen<T, N>::type;
+
+// make_index_sequence
+//
+// This template alias is equivalent to `index_sequence<0, 1, ..., N-1>`,
+// and is designed to be a drop-in replacement for C++14's
+// `std::make_index_sequence`.
+template <size_t N>
+using make_index_sequence = make_integer_sequence<size_t, N>;
+
+// index_sequence_for
+//
+// Converts a typename pack into an index sequence of the same length, and
+// is designed to be a drop-in replacement for C++14's
+// `std::index_sequence_for()`
+template <typename... Ts>
+using index_sequence_for = make_index_sequence<sizeof...(Ts)>;
+
+//// END OF CODE FROM GOOGLE ABSEIL
+
+#endif
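+
+// Illustrative sketch (editorial note, not part of the upstream sources): whether
+// taken from the C++14 standard library or from the Abseil-derived fallback above,
+// the aliases expand the same way, e.g.
+//
+//   make_index_sequence<3>         is  index_sequence<0, 1, 2>
+//   index_sequence_for<int, char>  is  index_sequence<0, 1>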
+
+// dispatch utility (taken from ranges-v3)
+template<std::size_t N> struct priority_tag : priority_tag < N - 1 > {};
+template<> struct priority_tag<0> {};
+
+// taken from ranges-v3
+template<typename T>
+struct static_const
+{
+    static JSON_INLINE_VARIABLE constexpr T value{};
+};
+
+#ifndef JSON_HAS_CPP_17
+    template<typename T>
+    constexpr T static_const<T>::value;
+#endif
+
+template<typename T, typename... Args>
+inline constexpr std::array<T, sizeof...(Args)> make_array(Args&& ... args)
+{
+    return std::array<T, sizeof...(Args)> {{static_cast<T>(std::forward<Args>(args))...}};
+}
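+
+// Illustrative sketch (editorial note, not part of the upstream sources): the
+// element type is given explicitly and the size is deduced from the pack, e.g.
+//
+//   constexpr auto a = make_array<int>(1, 2, 3);   // std::array<int, 3>{{1, 2, 3}}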
+
+}  // namespace detail
+NLOHMANN_JSON_NAMESPACE_END
+
+// #include <nlohmann/detail/meta/type_traits.hpp>
+//     __ _____ _____ _____
+//  __|  |   __|     |   | |  JSON for Modern C++
+// |  |  |__   |  |  | | | |  version 3.11.3
+// |_____|_____|_____|_|___|  https://github.com/nlohmann/json
+//
+// SPDX-FileCopyrightText: 2013-2023 Niels Lohmann <https://nlohmann.me>
+// SPDX-License-Identifier: MIT
+
+
+
+#include <limits> // numeric_limits
+#include <type_traits> // false_type, is_constructible, is_integral, is_same, true_type
+#include <utility> // declval
+#include <tuple> // tuple
+#include <string> // char_traits
+
+// #include <nlohmann/detail/iterators/iterator_traits.hpp>
+//     __ _____ _____ _____
+//  __|  |   __|     |   | |  JSON for Modern C++
+// |  |  |__   |  |  | | | |  version 3.11.3
+// |_____|_____|_____|_|___|  https://github.com/nlohmann/json
+//
+// SPDX-FileCopyrightText: 2013-2023 Niels Lohmann <https://nlohmann.me>
+// SPDX-License-Identifier: MIT
+
+
+
+#include <iterator> // random_access_iterator_tag
+
+// #include <nlohmann/detail/abi_macros.hpp>
+
+// #include <nlohmann/detail/meta/void_t.hpp>
+
+// #include <nlohmann/detail/meta/cpp_future.hpp>
+
+
+NLOHMANN_JSON_NAMESPACE_BEGIN
+namespace detail
+{
+
+template<typename It, typename = void>
+struct iterator_types {};
+
+template<typename It>
+struct iterator_types <
+    It,
+    void_t<typename It::difference_type, typename It::value_type, typename It::pointer,
+    typename It::reference, typename It::iterator_category >>
+{
+    using difference_type = typename It::difference_type;
+    using value_type = typename It::value_type;
+    using pointer = typename It::pointer;
+    using reference = typename It::reference;
+    using iterator_category = typename It::iterator_category;
+};
+
+// This is required as some compilers implement std::iterator_traits in a way that
+// doesn't work with SFINAE. See https://github.com/nlohmann/json/issues/1341.
+template<typename T, typename = void>
+struct iterator_traits
+{
+};
+
+template<typename T>
+struct iterator_traits < T, enable_if_t < !std::is_pointer<T>::value >>
+            : iterator_types<T>
+{
+};
+
+template<typename T>
+struct iterator_traits<T*, enable_if_t<std::is_pointer<T*>::value>>
+{
+    using iterator_category = std::random_access_iterator_tag;
+    using value_type = T;
+    using difference_type = ptrdiff_t;
+    using pointer = T*;
+    using reference = T&;
+};
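+
+// Illustrative sketch (editorial note, not part of the upstream sources): this
+// SFINAE-friendly shim mirrors std::iterator_traits, e.g.
+//
+//   iterator_traits<int*>::iterator_category                 is  std::random_access_iterator_tag
+//   iterator_traits<std::vector<int>::iterator>::value_type  is  int
+//   iterator_traits<int>                                      is  an empty struct (no nested types)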
+
+}  // namespace detail
+NLOHMANN_JSON_NAMESPACE_END
+
+// #include <nlohmann/detail/macro_scope.hpp>
+
+// #include <nlohmann/detail/meta/call_std/begin.hpp>
+//     __ _____ _____ _____
+//  __|  |   __|     |   | |  JSON for Modern C++
+// |  |  |__   |  |  | | | |  version 3.11.3
+// |_____|_____|_____|_|___|  https://github.com/nlohmann/json
+//
+// SPDX-FileCopyrightText: 2013-2023 Niels Lohmann <https://nlohmann.me>
+// SPDX-License-Identifier: MIT
+
+
+
+// #include <nlohmann/detail/macro_scope.hpp>
+
+
+NLOHMANN_JSON_NAMESPACE_BEGIN
+
+NLOHMANN_CAN_CALL_STD_FUNC_IMPL(begin);
+
+NLOHMANN_JSON_NAMESPACE_END
+
+// #include <nlohmann/detail/meta/call_std/end.hpp>
+//     __ _____ _____ _____
+//  __|  |   __|     |   | |  JSON for Modern C++
+// |  |  |__   |  |  | | | |  version 3.11.3
+// |_____|_____|_____|_|___|  https://github.com/nlohmann/json
+//
+// SPDX-FileCopyrightText: 2013-2023 Niels Lohmann <https://nlohmann.me>
+// SPDX-License-Identifier: MIT
+
+
+
+// #include <nlohmann/detail/macro_scope.hpp>
+
+
+NLOHMANN_JSON_NAMESPACE_BEGIN
+
+NLOHMANN_CAN_CALL_STD_FUNC_IMPL(end);
+
+NLOHMANN_JSON_NAMESPACE_END
+
+// #include <nlohmann/detail/meta/cpp_future.hpp>
+
+// #include <nlohmann/detail/meta/detected.hpp>
+
+// #include <nlohmann/json_fwd.hpp>
+//     __ _____ _____ _____
+//  __|  |   __|     |   | |  JSON for Modern C++
+// |  |  |__   |  |  | | | |  version 3.11.3
+// |_____|_____|_____|_|___|  https://github.com/nlohmann/json
+//
+// SPDX-FileCopyrightText: 2013-2023 Niels Lohmann <https://nlohmann.me>
+// SPDX-License-Identifier: MIT
+
+#ifndef INCLUDE_NLOHMANN_JSON_FWD_HPP_
+    #define INCLUDE_NLOHMANN_JSON_FWD_HPP_
+
+    #include <cstdint> // int64_t, uint64_t
+    #include <map> // map
+    #include <memory> // allocator
+    #include <string> // string
+    #include <vector> // vector
+
+    // #include <nlohmann/detail/abi_macros.hpp>
+
+
+    /*!
+    @brief namespace for Niels Lohmann
+    @see https://github.com/nlohmann
+    @since version 1.0.0
+    */
+    NLOHMANN_JSON_NAMESPACE_BEGIN
+
+    /*!
+    @brief default JSONSerializer template argument
+
+    This serializer ignores the template arguments and uses ADL
+    ([argument-dependent lookup](https://en.cppreference.com/w/cpp/language/adl))
+    for serialization.
+    */
+    template<typename T = void, typename SFINAE = void>
+    struct adl_serializer;
+
+    /// a class to store JSON values
+    /// @sa https://json.nlohmann.me/api/basic_json/
+    template<template<typename U, typename V, typename... Args> class ObjectType =
+    std::map,
+    template<typename U, typename... Args> class ArrayType = std::vector,
+    class StringType = std::string, class BooleanType = bool,
+    class NumberIntegerType = std::int64_t,
+    class NumberUnsignedType = std::uint64_t,
+    class NumberFloatType = double,
+    template<typename U> class AllocatorType = std::allocator,
+    template<typename T, typename SFINAE = void> class JSONSerializer =
+    adl_serializer,
+    class BinaryType = std::vector<std::uint8_t>, // cppcheck-suppress syntaxError
+    class CustomBaseClass = void>
+    class basic_json;
+
+    /// @brief JSON Pointer defines a string syntax for identifying a specific value within a JSON document
+    /// @sa https://json.nlohmann.me/api/json_pointer/
+    template<typename RefStringType>
+    class json_pointer;
+
+    /*!
+    @brief default specialization
+    @sa https://json.nlohmann.me/api/json/
+    */
+    using json = basic_json<>;
+
+    /// @brief a minimal map-like container that preserves insertion order
+    /// @sa https://json.nlohmann.me/api/ordered_map/
+    template<class Key, class T, class IgnoredLess, class Allocator>
+    struct ordered_map;
+
+    /// @brief specialization that maintains the insertion order of object keys
+    /// @sa https://json.nlohmann.me/api/ordered_json/
+    using ordered_json = basic_json<nlohmann::ordered_map>;
+
+    NLOHMANN_JSON_NAMESPACE_END
+
+#endif  // INCLUDE_NLOHMANN_JSON_FWD_HPP_
+
+
+NLOHMANN_JSON_NAMESPACE_BEGIN
+/*!
+@brief detail namespace with internal helper functions
+
+This namespace collects functions that should not be exposed,
+implementations of some @ref basic_json methods, and meta-programming helpers.
+
+@since version 2.1.0
+*/
+namespace detail
+{
+
+/////////////
+// helpers //
+/////////////
+
+// Note to maintainers:
+//
+// Every trait in this file expects a non CV-qualified type.
+// The only exceptions are in the 'aliases for detected' section
+// (i.e. those of the form: decltype(T::member_function(std::declval<T>())))
+//
+// In this case, T has to be properly CV-qualified to constraint the function arguments
+// (e.g. to_json(BasicJsonType&, const T&))
+
+template<typename> struct is_basic_json : std::false_type {};
+
+NLOHMANN_BASIC_JSON_TPL_DECLARATION
+struct is_basic_json<NLOHMANN_BASIC_JSON_TPL> : std::true_type {};
+
+// used by exceptions create() member functions
+// true_type for pointer to possibly cv-qualified basic_json or std::nullptr_t
+// false_type otherwise
+template<typename BasicJsonContext>
+struct is_basic_json_context :
+    std::integral_constant < bool,
+    is_basic_json<typename std::remove_cv<typename std::remove_pointer<BasicJsonContext>::type>::type>::value
+    || std::is_same<BasicJsonContext, std::nullptr_t>::value >
+{};
+
+//////////////////////
+// json_ref helpers //
+//////////////////////
+
+template<typename>
+class json_ref;
+
+template<typename>
+struct is_json_ref : std::false_type {};
+
+template<typename T>
+struct is_json_ref<json_ref<T>> : std::true_type {};
+
+//////////////////////////
+// aliases for detected //
+//////////////////////////
+
+template<typename T>
+using mapped_type_t = typename T::mapped_type;
+
+template<typename T>
+using key_type_t = typename T::key_type;
+
+template<typename T>
+using value_type_t = typename T::value_type;
+
+template<typename T>
+using difference_type_t = typename T::difference_type;
+
+template<typename T>
+using pointer_t = typename T::pointer;
+
+template<typename T>
+using reference_t = typename T::reference;
+
+template<typename T>
+using iterator_category_t = typename T::iterator_category;
+
+template<typename T, typename... Args>
+using to_json_function = decltype(T::to_json(std::declval<Args>()...));
+
+template<typename T, typename... Args>
+using from_json_function = decltype(T::from_json(std::declval<Args>()...));
+
+template<typename T, typename U>
+using get_template_function = decltype(std::declval<T>().template get<U>());
+
+// trait checking if JSONSerializer<T>::from_json(json const&, udt&) exists
+template<typename BasicJsonType, typename T, typename = void>
+struct has_from_json : std::false_type {};
+
+// trait checking if j.get<T> is valid
+// use this trait instead of std::is_constructible or std::is_convertible,
+// both rely on, or make use of implicit conversions, and thus fail when T
+// has several constructors/operator= (see https://github.com/nlohmann/json/issues/958)
+template <typename BasicJsonType, typename T>
+struct is_getable
+{
+    static constexpr bool value = is_detected<get_template_function, const BasicJsonType&, T>::value;
+};
+
+template<typename BasicJsonType, typename T>
+struct has_from_json < BasicJsonType, T, enable_if_t < !is_basic_json<T>::value >>
+{
+    using serializer = typename BasicJsonType::template json_serializer<T, void>;
+
+    static constexpr bool value =
+        is_detected_exact<void, from_json_function, serializer,
+        const BasicJsonType&, T&>::value;
+};
+
+// This trait checks if JSONSerializer<T>::from_json(json const&) exists
+// this overload is used for non-default-constructible user-defined-types
+template<typename BasicJsonType, typename T, typename = void>
+struct has_non_default_from_json : std::false_type {};
+
+template<typename BasicJsonType, typename T>
+struct has_non_default_from_json < BasicJsonType, T, enable_if_t < !is_basic_json<T>::value >>
+{
+    using serializer = typename BasicJsonType::template json_serializer<T, void>;
+
+    static constexpr bool value =
+        is_detected_exact<T, from_json_function, serializer,
+        const BasicJsonType&>::value;
+};
+
+// This trait checks if BasicJsonType::json_serializer<T>::to_json exists
+// Do not evaluate the trait when T is a basic_json type, to avoid template instantiation infinite recursion.
+template<typename BasicJsonType, typename T, typename = void>
+struct has_to_json : std::false_type {};
+
+template<typename BasicJsonType, typename T>
+struct has_to_json < BasicJsonType, T, enable_if_t < !is_basic_json<T>::value >>
+{
+    using serializer = typename BasicJsonType::template json_serializer<T, void>;
+
+    static constexpr bool value =
+        is_detected_exact<void, to_json_function, serializer, BasicJsonType&,
+        T>::value;
+};
+
+template<typename T>
+using detect_key_compare = typename T::key_compare;
+
+template<typename T>
+struct has_key_compare : std::integral_constant<bool, is_detected<detect_key_compare, T>::value> {};
+
+// obtains the actual object key comparator
+template<typename BasicJsonType>
+struct actual_object_comparator
+{
+    using object_t = typename BasicJsonType::object_t;
+    using object_comparator_t = typename BasicJsonType::default_object_comparator_t;
+    using type = typename std::conditional < has_key_compare<object_t>::value,
+          typename object_t::key_compare, object_comparator_t>::type;
+};
+
+template<typename BasicJsonType>
+using actual_object_comparator_t = typename actual_object_comparator<BasicJsonType>::type;
+
+/////////////////
+// char_traits //
+/////////////////
+
+// Primary template of char_traits calls std char_traits
+template<typename T>
+struct char_traits : std::char_traits<T>
+{};
+
+// Explicitly define char traits for unsigned char since it is not standard
+template<>
+struct char_traits<unsigned char> : std::char_traits<char>
+{
+    using char_type = unsigned char;
+    using int_type = uint64_t;
+
+    // Redefine to_int_type function
+    static int_type to_int_type(char_type c) noexcept
+    {
+        return static_cast<int_type>(c);
+    }
+
+    static char_type to_char_type(int_type i) noexcept
+    {
+        return static_cast<char_type>(i);
+    }
+
+    static constexpr int_type eof() noexcept
+    {
+        return static_cast<int_type>(EOF);
+    }
+};
+
+// Explicitly define char traits for signed char since it is not standard
+template<>
+struct char_traits<signed char> : std::char_traits<char>
+{
+    using char_type = signed char;
+    using int_type = uint64_t;
+
+    // Redefine to_int_type function
+    static int_type to_int_type(char_type c) noexcept
+    {
+        return static_cast<int_type>(c);
+    }
+
+    static char_type to_char_type(int_type i) noexcept
+    {
+        return static_cast<char_type>(i);
+    }
+
+    static constexpr int_type eof() noexcept
+    {
+        return static_cast<int_type>(EOF);
+    }
+};
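+
+// Illustrative sketch (editorial note, not part of the upstream sources): the
+// specializations above let code read unsigned/signed char buffers uniformly, e.g.
+//
+//   using traits = char_traits<unsigned char>;
+//   traits::int_type i = traits::to_int_type('{');  // widened to uint64_t, no sign extension
+//   bool done = (i == traits::eof());               // eof() is distinct from every byte value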
+
+///////////////////
+// is_ functions //
+///////////////////
+
+// https://en.cppreference.com/w/cpp/types/conjunction
+template<class...> struct conjunction : std::true_type { };
+template<class B> struct conjunction<B> : B { };
+template<class B, class... Bn>
+struct conjunction<B, Bn...>
+: std::conditional<static_cast<bool>(B::value), conjunction<Bn...>, B>::type {};
+
+// https://en.cppreference.com/w/cpp/types/negation
+template<class B> struct negation : std::integral_constant < bool, !B::value > { };
+
+// Reimplementation of is_constructible and is_default_constructible, due to them being broken for
+// std::pair and std::tuple until LWG 2367 fix (see https://cplusplus.github.io/LWG/lwg-defects.html#2367).
+// This causes compile errors in e.g. clang 3.5 or gcc 4.9.
+template <typename T>
+struct is_default_constructible : std::is_default_constructible<T> {};
+
+template <typename T1, typename T2>
+struct is_default_constructible<std::pair<T1, T2>>
+            : conjunction<is_default_constructible<T1>, is_default_constructible<T2>> {};
+
+template <typename T1, typename T2>
+struct is_default_constructible<const std::pair<T1, T2>>
+            : conjunction<is_default_constructible<T1>, is_default_constructible<T2>> {};
+
+template <typename... Ts>
+struct is_default_constructible<std::tuple<Ts...>>
+            : conjunction<is_default_constructible<Ts>...> {};
+
+template <typename... Ts>
+struct is_default_constructible<const std::tuple<Ts...>>
+            : conjunction<is_default_constructible<Ts>...> {};
+
+template <typename T, typename... Args>
+struct is_constructible : std::is_constructible<T, Args...> {};
+
+template <typename T1, typename T2>
+struct is_constructible<std::pair<T1, T2>> : is_default_constructible<std::pair<T1, T2>> {};
+
+template <typename T1, typename T2>
+struct is_constructible<const std::pair<T1, T2>> : is_default_constructible<const std::pair<T1, T2>> {};
+
+template <typename... Ts>
+struct is_constructible<std::tuple<Ts...>> : is_default_constructible<std::tuple<Ts...>> {};
+
+template <typename... Ts>
+struct is_constructible<const std::tuple<Ts...>> : is_default_constructible<const std::tuple<Ts...>> {};
+
+template<typename T, typename = void>
+struct is_iterator_traits : std::false_type {};
+
+template<typename T>
+struct is_iterator_traits<iterator_traits<T>>
+{
+  private:
+    using traits = iterator_traits<T>;
+
+  public:
+    static constexpr auto value =
+        is_detected<value_type_t, traits>::value &&
+        is_detected<difference_type_t, traits>::value &&
+        is_detected<pointer_t, traits>::value &&
+        is_detected<iterator_category_t, traits>::value &&
+        is_detected<reference_t, traits>::value;
+};
+
+template<typename T>
+struct is_range
+{
+  private:
+    using t_ref = typename std::add_lvalue_reference<T>::type;
+
+    using iterator = detected_t<result_of_begin, t_ref>;
+    using sentinel = detected_t<result_of_end, t_ref>;
+
+    // to be 100% correct, it should use https://en.cppreference.com/w/cpp/iterator/input_or_output_iterator
+    // and https://en.cppreference.com/w/cpp/iterator/sentinel_for
+    // but reimplementing these would be too much work, as a lot of other concepts are used underneath
+    static constexpr auto is_iterator_begin =
+        is_iterator_traits<iterator_traits<iterator>>::value;
+
+  public:
+    static constexpr bool value = !std::is_same<iterator, nonesuch>::value && !std::is_same<sentinel, nonesuch>::value && is_iterator_begin;
+};
+
+template<typename R>
+using iterator_t = enable_if_t<is_range<R>::value, result_of_begin<decltype(std::declval<R&>())>>;
+
+template<typename T>
+using range_value_t = value_type_t<iterator_traits<iterator_t<T>>>;
+
+// The following implementation of is_complete_type is taken from
+// https://blogs.msdn.microsoft.com/vcblog/2015/12/02/partial-support-for-expression-sfinae-in-vs-2015-update-1/
+// and is written by Xiang Fan who agreed to using it in this library.
+
+template <typename T, typename = void>
+struct is_complete_type : std::false_type {};
+
+template <typename T>
+struct is_complete_type<T, decltype(void(sizeof(T)))> : std::true_type {};
+
+template<typename BasicJsonType, typename CompatibleObjectType,
+         typename = void>
+struct is_compatible_object_type_impl : std::false_type {};
+
+template<typename BasicJsonType, typename CompatibleObjectType>
+struct is_compatible_object_type_impl <
+    BasicJsonType, CompatibleObjectType,
+    enable_if_t < is_detected<mapped_type_t, CompatibleObjectType>::value&&
+    is_detected<key_type_t, CompatibleObjectType>::value >>
+{
+    using object_t = typename BasicJsonType::object_t;
+
+    // macOS's is_constructible does not play well with nonesuch...
+    static constexpr bool value =
+        is_constructible<typename object_t::key_type,
+        typename CompatibleObjectType::key_type>::value &&
+        is_constructible<typename object_t::mapped_type,
+        typename CompatibleObjectType::mapped_type>::value;
+};
+
+template<typename BasicJsonType, typename CompatibleObjectType>
+struct is_compatible_object_type
+    : is_compatible_object_type_impl<BasicJsonType, CompatibleObjectType> {};
+
+template<typename BasicJsonType, typename ConstructibleObjectType,
+         typename = void>
+struct is_constructible_object_type_impl : std::false_type {};
+
+template<typename BasicJsonType, typename ConstructibleObjectType>
+struct is_constructible_object_type_impl <
+    BasicJsonType, ConstructibleObjectType,
+    enable_if_t < is_detected<mapped_type_t, ConstructibleObjectType>::value&&
+    is_detected<key_type_t, ConstructibleObjectType>::value >>
+{
+    using object_t = typename BasicJsonType::object_t;
+
+    static constexpr bool value =
+        (is_default_constructible<ConstructibleObjectType>::value &&
+         (std::is_move_assignable<ConstructibleObjectType>::value ||
+          std::is_copy_assignable<ConstructibleObjectType>::value) &&
+         (is_constructible<typename ConstructibleObjectType::key_type,
+          typename object_t::key_type>::value &&
+          std::is_same <
+          typename object_t::mapped_type,
+          typename ConstructibleObjectType::mapped_type >::value)) ||
+        (has_from_json<BasicJsonType,
+         typename ConstructibleObjectType::mapped_type>::value ||
+         has_non_default_from_json <
+         BasicJsonType,
+         typename ConstructibleObjectType::mapped_type >::value);
+};
+
+template<typename BasicJsonType, typename ConstructibleObjectType>
+struct is_constructible_object_type
+    : is_constructible_object_type_impl<BasicJsonType,
+      ConstructibleObjectType> {};
+
+template<typename BasicJsonType, typename CompatibleStringType>
+struct is_compatible_string_type
+{
+    static constexpr auto value =
+        is_constructible<typename BasicJsonType::string_t, CompatibleStringType>::value;
+};
+
+template<typename BasicJsonType, typename ConstructibleStringType>
+struct is_constructible_string_type
+{
+    // launder type through decltype() to fix compilation failure on ICPC
+#ifdef __INTEL_COMPILER
+    using laundered_type = decltype(std::declval<ConstructibleStringType>());
+#else
+    using laundered_type = ConstructibleStringType;
+#endif
+
+    static constexpr auto value =
+        conjunction <
+        is_constructible<laundered_type, typename BasicJsonType::string_t>,
+        is_detected_exact<typename BasicJsonType::string_t::value_type,
+        value_type_t, laundered_type >>::value;
+};
+
+template<typename BasicJsonType, typename CompatibleArrayType, typename = void>
+struct is_compatible_array_type_impl : std::false_type {};
+
+template<typename BasicJsonType, typename CompatibleArrayType>
+struct is_compatible_array_type_impl <
+    BasicJsonType, CompatibleArrayType,
+    enable_if_t <
+    is_detected<iterator_t, CompatibleArrayType>::value&&
+    is_iterator_traits<iterator_traits<detected_t<iterator_t, CompatibleArrayType>>>::value&&
+// special case for types like std::filesystem::path whose iterator's value_type are themselves
+// c.f. https://github.com/nlohmann/json/pull/3073
+    !std::is_same<CompatibleArrayType, detected_t<range_value_t, CompatibleArrayType>>::value >>
+{
+    static constexpr bool value =
+        is_constructible<BasicJsonType,
+        range_value_t<CompatibleArrayType>>::value;
+};
+
+template<typename BasicJsonType, typename CompatibleArrayType>
+struct is_compatible_array_type
+    : is_compatible_array_type_impl<BasicJsonType, CompatibleArrayType> {};
+
+template<typename BasicJsonType, typename ConstructibleArrayType, typename = void>
+struct is_constructible_array_type_impl : std::false_type {};
+
+template<typename BasicJsonType, typename ConstructibleArrayType>
+struct is_constructible_array_type_impl <
+    BasicJsonType, ConstructibleArrayType,
+    enable_if_t<std::is_same<ConstructibleArrayType,
+    typename BasicJsonType::value_type>::value >>
+            : std::true_type {};
+
+template<typename BasicJsonType, typename ConstructibleArrayType>
+struct is_constructible_array_type_impl <
+    BasicJsonType, ConstructibleArrayType,
+    enable_if_t < !std::is_same<ConstructibleArrayType,
+    typename BasicJsonType::value_type>::value&&
+    !is_compatible_string_type<BasicJsonType, ConstructibleArrayType>::value&&
+    is_default_constructible<ConstructibleArrayType>::value&&
+(std::is_move_assignable<ConstructibleArrayType>::value ||
+ std::is_copy_assignable<ConstructibleArrayType>::value)&&
+is_detected<iterator_t, ConstructibleArrayType>::value&&
+is_iterator_traits<iterator_traits<detected_t<iterator_t, ConstructibleArrayType>>>::value&&
+is_detected<range_value_t, ConstructibleArrayType>::value&&
+// special case for types like std::filesystem::path whose iterator's value_type are themselves
+// c.f. https://github.com/nlohmann/json/pull/3073
+!std::is_same<ConstructibleArrayType, detected_t<range_value_t, ConstructibleArrayType>>::value&&
+        is_complete_type <
+        detected_t<range_value_t, ConstructibleArrayType >>::value >>
+{
+    using value_type = range_value_t<ConstructibleArrayType>;
+
+    static constexpr bool value =
+        std::is_same<value_type,
+        typename BasicJsonType::array_t::value_type>::value ||
+        has_from_json<BasicJsonType,
+        value_type>::value ||
+        has_non_default_from_json <
+        BasicJsonType,
+        value_type >::value;
+};
+
+template<typename BasicJsonType, typename ConstructibleArrayType>
+struct is_constructible_array_type
+    : is_constructible_array_type_impl<BasicJsonType, ConstructibleArrayType> {};
+
+template<typename RealIntegerType, typename CompatibleNumberIntegerType,
+         typename = void>
+struct is_compatible_integer_type_impl : std::false_type {};
+
+template<typename RealIntegerType, typename CompatibleNumberIntegerType>
+struct is_compatible_integer_type_impl <
+    RealIntegerType, CompatibleNumberIntegerType,
+    enable_if_t < std::is_integral<RealIntegerType>::value&&
+    std::is_integral<CompatibleNumberIntegerType>::value&&
+    !std::is_same<bool, CompatibleNumberIntegerType>::value >>
+{
+    // is there an assert somewhere on overflows?
+    using RealLimits = std::numeric_limits<RealIntegerType>;
+    using CompatibleLimits = std::numeric_limits<CompatibleNumberIntegerType>;
+
+    static constexpr auto value =
+        is_constructible<RealIntegerType,
+        CompatibleNumberIntegerType>::value &&
+        CompatibleLimits::is_integer &&
+        RealLimits::is_signed == CompatibleLimits::is_signed;
+};
+
+template<typename RealIntegerType, typename CompatibleNumberIntegerType>
+struct is_compatible_integer_type
+    : is_compatible_integer_type_impl<RealIntegerType,
+      CompatibleNumberIntegerType> {};
+
+template<typename BasicJsonType, typename CompatibleType, typename = void>
+struct is_compatible_type_impl: std::false_type {};
+
+template<typename BasicJsonType, typename CompatibleType>
+struct is_compatible_type_impl <
+    BasicJsonType, CompatibleType,
+    enable_if_t<is_complete_type<CompatibleType>::value >>
+{
+    static constexpr bool value =
+        has_to_json<BasicJsonType, CompatibleType>::value;
+};
+
+template<typename BasicJsonType, typename CompatibleType>
+struct is_compatible_type
+    : is_compatible_type_impl<BasicJsonType, CompatibleType> {};
+
+template<typename T1, typename T2>
+struct is_constructible_tuple : std::false_type {};
+
+template<typename T1, typename... Args>
+struct is_constructible_tuple<T1, std::tuple<Args...>> : conjunction<is_constructible<T1, Args>...> {};
+
+template<typename BasicJsonType, typename T>
+struct is_json_iterator_of : std::false_type {};
+
+template<typename BasicJsonType>
+struct is_json_iterator_of<BasicJsonType, typename BasicJsonType::iterator> : std::true_type {};
+
+template<typename BasicJsonType>
+struct is_json_iterator_of<BasicJsonType, typename BasicJsonType::const_iterator> : std::true_type
+{};
+
+// checks if a given type T is a template specialization of Primary
+template<template <typename...> class Primary, typename T>