From 8e30926c5ffabde21836c3279ff7414b33d22435 Mon Sep 17 00:00:00 2001
From: Christian Hudon
Date: Thu, 16 Jan 2025 13:11:35 -0500
Subject: [PATCH] Quick doc fixes (#117)

---
 LICENSE             | 2 +-
 docs/license.md     | 2 +-
 docs/quick-start.md | 6 +++---
 mkdocs.yaml         | 2 +-
 4 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/LICENSE b/LICENSE
index 877bdac7..94cf1025 100644
--- a/LICENSE
+++ b/LICENSE
@@ -178,7 +178,7 @@ The following applies to all files unless otherwise noted:
 
    END OF TERMS AND CONDITIONS
 
-   Copyright 2024 ServiceNow, Inc.
+   Copyright 2024-2025 ServiceNow, Inc.
 
    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
diff --git a/docs/license.md b/docs/license.md
index eb39eeda..c6c18c63 100644
--- a/docs/license.md
+++ b/docs/license.md
@@ -5,7 +5,7 @@ title: License
 Fast-LLM is licenced under the Apache 2.0 license:
 
 ```text
-Copyright 2024 ServiceNow, Inc.
+Copyright 2024-2025 ServiceNow, Inc.
 
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
diff --git a/docs/quick-start.md b/docs/quick-start.md
index 56189d0c..abecc96c 100644
--- a/docs/quick-start.md
+++ b/docs/quick-start.md
@@ -49,7 +49,7 @@ Now, select the compute environment that matches your setup or preferred workflo
     Install Python 3.12 (or later) if it's not already available on your system. For a Python virtual environment, run:
 
     ```bash
-    python3.10 -m venv ./fast-llm-tutorial/venv
+    python3.12 -m venv ./fast-llm-tutorial/venv
     source ./fast-llm-tutorial/venv/bin/activate
     pip install --upgrade pip
     ```
@@ -202,11 +202,11 @@ Choose based on your goals for this tutorial.
 
 === "Big"
 
-    For the big configuration, we'll use a Llama model with 8B parameters. We'll grab the model from the Huggingface Hub and save it to our inputs folder.
+    For the big configuration, we'll use a Llama model with 8B parameters. We'll grab the model from the HuggingFace Hub and save it to our inputs folder.
 
     !!! note "Access Required"
 
-        Meta gates access to their Llama models. You need to request access to the model from Meta before you can download it at https://huggingface.co/meta-llama/Llama-3.1-8B. You'll need to authenticate with your Hugging Face account to download the model:
+        Meta gates access to their Llama models. You need to request access to the model from Meta before you can download it at https://huggingface.co/meta-llama/Llama-3.1-8B. You'll need to authenticate with your HuggingFace account to download the model:
 
         ```bash
         pip install huggingface_hub
diff --git a/mkdocs.yaml b/mkdocs.yaml
index c566b6cd..1d3a0892 100644
--- a/mkdocs.yaml
+++ b/mkdocs.yaml
@@ -12,7 +12,7 @@ repo_url: https://github.com/ServiceNow/Fast-LLM
 edit_uri: edit/main/docs/
 
 # Copyright
-copyright: Copyright 2024 ServiceNow, Inc.
+copyright: Copyright 2024-2025 ServiceNow, Inc.
 
 # Configuration
 theme:
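
The last quick-start hunk ends partway through the authentication snippet (only `pip install huggingface_hub` is visible as trailing context). A minimal sketch of what that gated-model download step typically looks like, assuming the standard `huggingface_hub` CLI; the exact commands and target directory used in `docs/quick-start.md` may differ:

```bash
# Install the Hugging Face Hub client (the line shown as context in the hunk above).
pip install huggingface_hub

# Log in with an access token from an account that has been granted access
# to the gated meta-llama/Llama-3.1-8B repository.
huggingface-cli login

# Download the model weights; the local directory here is illustrative.
huggingface-cli download meta-llama/Llama-3.1-8B --local-dir ./fast-llm-tutorial/pretrained-model
```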